gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package ualberta.g12.adventurecreator.views;
import android.app.Activity;
import android.app.SearchManager;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.Button;
import android.widget.ListView;
import android.widget.SearchView;
import android.widget.Toast;
import ualberta.g12.adventurecreator.R;
import ualberta.g12.adventurecreator.data.Story;
import ualberta.g12.adventurecreator.tasks.CacheStoryTask;
import ualberta.g12.adventurecreator.tasks.DownloadStoryTask;
import ualberta.g12.adventurecreator.tasks.DownloadTitleAuthorsTask;
import java.util.ArrayList;
import java.util.List;
/**
* Activity used for viewing any stories that have been published online and are viewed
* through the listview. The activity allows the user to browser through online stories
* as well as download them to a local cache.
*
*/
public class OnlineStoryViewActivity extends Activity implements OnItemClickListener,
OView<List<Story>> {
private Button mainButton;
private ListView listView;
private StoryAuthorMapListAdapter adapter;
private List<Story> titleAuthors;
private DownloadTitleAuthorsTask downloadTitleAuthorsTask;
private static boolean downloadMode = true;
public static final String DOWNLOAD_MODE = "download_mode";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_online_story_view);
downloadTitleAuthorsTask = new DownloadTitleAuthorsTask(getApplicationContext(), this);
loadTitleAuthors();
setUpUi();
// Lets see who started us
handleIntent(getIntent());
}
@Override
protected void onNewIntent(Intent intent) {
setIntent(intent);
handleIntent(intent);
}
private void handleIntent(Intent intent) {
if (Intent.ACTION_SEARCH.equals(intent.getAction())) {
String query = intent.getStringExtra(SearchManager.QUERY);
Intent searchIntent = new Intent(getApplicationContext(),
OnlineStorySearchActivity.class);
searchIntent.putExtra(SearchManager.QUERY, query);
searchIntent.putExtra(OnlineStoryViewActivity.DOWNLOAD_MODE, downloadMode);
startActivity(searchIntent);
}
}
private void loadTitleAuthors() {
/*
* Once online works we are going to load our list full of the title
* authors
*/
titleAuthors = new ArrayList<Story>();
downloadTitleAuthorsTask.execute(new String[] {
null
});
}
private void setUpUi() {
mainButton = (Button) findViewById(R.id.online_story_start_main_activity);
mainButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
/*
* The only time we're ever going to be called is when
* MainActivity starts us. If we finish we'll bring the user
* back to the the MainActivity
*/
finish();
}
});
listView = (ListView) findViewById(R.id.online_story_author_listview);
// Set up ListView Stuff-*
adapter = new StoryAuthorMapListAdapter(this, R.layout.listview_story_list, titleAuthors);
listView.setAdapter(adapter);
listView.setOnItemClickListener(this);
}
@Override
public void onResume() {
super.onResume();
SharedPreferences settings = getPreferences(MODE_PRIVATE);
downloadMode = settings.getBoolean(DOWNLOAD_MODE, true);
}
@Override
public void onPause() {
super.onPause();
SharedPreferences settings = getPreferences(MODE_PRIVATE);
SharedPreferences.Editor editor = settings.edit();
editor.putBoolean(DOWNLOAD_MODE, downloadMode);
// Commit them datas
editor.commit();
}
/**
* updates the stories in the online list
*
* @param current list of stories being stored online
*/
public void update(List<Story> list) {
titleAuthors.clear();
titleAuthors.addAll(list);
adapter.notifyDataSetChanged();
Toast.makeText(getApplicationContext(), "Stories Loaded " + list.size(),
Toast.LENGTH_SHORT).show();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.online_story_view, menu);
// Associate searchable configuration with the searchview
SearchManager searchManager = (SearchManager) getSystemService(Context.SEARCH_SERVICE);
SearchView searchView = (SearchView) menu.findItem(R.id.online_story_search)
.getActionView();
searchView.setSearchableInfo(searchManager.getSearchableInfo(getComponentName()));
return true;
}
@Override
public boolean onPrepareOptionsMenu(Menu menu) {
// Make sure check box is checked if it needs to be
menu.findItem(R.id.online_download_mode).setChecked(downloadMode);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.online_download_mode:
boolean alreadyChecked = item.isChecked();
// Set it to the opposite of what it was previously
item.setChecked(!alreadyChecked);
// Update the flag
downloadMode = item.isChecked();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
@Override
public void onItemClick(AdapterView<?> parent, View v, int pos, long id) {
// Download or stream story
Story ta = titleAuthors.get(pos);
if (downloadMode) {
// Download story lol
Toast.makeText(this, String.format("Downloading story %s", ta.getTitle()),
Toast.LENGTH_SHORT).show();
DownloadStoryTask dst = new DownloadStoryTask(getApplicationContext());
dst.execute(new Story[] {ta});
} else {
// Stream story
Toast.makeText(this, String.format("Loading story %s", ta.getTitle()),
Toast.LENGTH_SHORT)
.show();
// Send some stuff to FragmentViewActivity
CacheStoryTask cst = new CacheStoryTask(this);
cst.execute(new Story[] {ta});
}
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.xml;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.util.Disposer;
import com.intellij.psi.PsiFile;
import com.intellij.psi.xml.XmlFile;
import com.intellij.util.xml.impl.DomFileElementImpl;
import com.intellij.util.xml.impl.MockDomFileDescription;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
import java.util.Set;
/**
* @author peter
*/
public class DomFileDescriptionTest extends DomHardCoreTestCase {
  // Registered with a MockDomFileDescription in setUp(); unconditionally treated as a DOM file.
  private XmlFile myFooElementFile;
  // Its DOM-ness is conditional: recognized only while myFooElementFile's text contains "239".
  private XmlFile myBarElementFile;
  // Parent disposable for all file descriptions registered by these tests.
  private Disposable myDisposable = Disposer.newDisposable();

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    myFooElementFile = (XmlFile)createFile("a.xml", "<a/>");
    getDomManager().registerFileDescription(new MockDomFileDescription<FooElement>(FooElement.class, "a", myFooElementFile), myDisposable);
    myBarElementFile = (XmlFile)createFile("b.xml", "<b/>");
    getDomManager().registerFileDescription(new DomFileDescription<BarElement>(BarElement.class, "b") {
      @Override
      public boolean isMyFile(@NotNull final XmlFile file, final Module module) {
        // Deliberately keyed off the *other* file's text so tests can toggle
        // b.xml's DOM-ness by editing a.xml.
        String text = myFooElementFile.getText();
        return text.contains("239");
      }

      @Override
      public boolean isAutomaticHighlightingEnabled() {
        return false;
      }
    }, myDisposable);
    assertResultsAndClear();
  }

  @Override
  public void tearDown() throws Exception {
    try {
      // Unregisters every description registered against myDisposable.
      Disposer.dispose(myDisposable);
    }
    finally {
      // Null out PSI references so the fixture does not leak between tests.
      myFooElementFile = null;
      myBarElementFile = null;
      super.tearDown();
    }
  }

  // b.xml must not be a DOM file initially: a.xml does not yet contain "239".
  public void testNoInitialDomnessInB() throws Throwable {
    assertFalse(getDomManager().isDomFile(myBarElementFile));
    assertNull(getDomManager().getFileElement(myBarElementFile));
  }

  // DOM recognition (and element validity) must follow edits to the file's own text.
  public void testIsDomValue() throws Throwable {
    final XmlFile file = (XmlFile)createFile("a.xml", "<b>42</b>");
    getDomManager().registerFileDescription(new DomFileDescription<MyElement>(MyElement.class, "b") {
      @Override
      public boolean isMyFile(@NotNull final XmlFile file, final Module module) {
        return /*super.isMyFile(file, module) && */file.getText().contains("239");
      }
    }, myDisposable);

    assertFalse(getDomManager().isDomFile(file));
    assertNull(getDomManager().getFileElement(file));

    // Flip the condition on: text now contains "239".
    new WriteCommandAction(getProject()) {
      @Override
      protected void run(@NotNull Result result) throws Throwable {
        file.getDocument().getRootTag().getValue().setText("239");
      }
    }.execute();

    assertTrue(getDomManager().isDomFile(file));
    final DomFileElementImpl<MyElement> root = getDomManager().getFileElement(file);
    assertNotNull(root);
    final MyElement child = root.getRootElement().getChild();
    assertTrue(root.isValid());
    assertTrue(child.isValid());

    // Flip the condition off again; previously obtained elements must invalidate.
    new WriteCommandAction(getProject()) {
      @Override
      protected void run(@NotNull Result result) throws Throwable {
        file.getDocument().getRootTag().getValue().setText("57121");
      }
    }.execute();

    assertFalse(getDomManager().isDomFile(file));
    assertNull(getDomManager().getFileElement(file));
    assertFalse(root.isValid());
    assertFalse(child.isValid());
  }

  // Renaming and copying must carry the file description along; the copy gets
  // its own, distinct DomFileElement.
  public void testCopyFileDescriptionFromOriginalFile() throws Throwable {
    final XmlFile file = (XmlFile)createFile("a.xml", "<b>42</b>");
    getDomManager().registerFileDescription(new MockDomFileDescription<MyElement>(MyElement.class, "b", file), myDisposable);
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
      @Override
      public void run() {
        file.setName("b.xml");
      }
    });
    assertTrue(getDomManager().isDomFile(file));
    final XmlFile copy = (XmlFile)file.copy();
    assertTrue(getDomManager().isDomFile(copy));
    assertFalse(getDomManager().getFileElement(file).equals(getDomManager().getFileElement(copy)));
  }

  // Regression test: descriptions whose dependency computation re-enters
  // isDomFile() must not recurse infinitely (239 nested registrations).
  public void testDependantFileDescriptionCauseStackOverflow() throws Throwable {
    final XmlFile interestingFile = (XmlFile)createFile("a.xml", "<b>42</b>");
    getDomManager().registerFileDescription(new MockDomFileDescription<MyElement>(MyElement.class, "b", (XmlFile)null), myDisposable);
    for (int i = 0; i < 239; i++) {
      getDomManager().registerFileDescription(new MockDomFileDescription<AbstractElement>(AbstractElement.class, "b", (XmlFile)null) {
        @Override
        @NotNull
        public Set getDependencyItems(final XmlFile file) {
          // Re-entrant query while dependencies are being gathered.
          getDomManager().isDomFile(interestingFile);
          return super.getDependencyItems(file);
        }
      }, myDisposable);
    }

    getDomManager().isDomFile(interestingFile);
  }

  // A namespaced description only matches once the root tag declares the
  // namespace registered via registerNamespacePolicy.
  public void testCheckNamespace() throws Throwable {
    getDomManager().registerFileDescription(new DomFileDescription<NamespacedElement>(NamespacedElement.class, "xxx", "bar"){
      @Override
      protected void initializeFileDescription() {
        registerNamespacePolicy("foo", "bar");
      }
    }, myDisposable);

    final PsiFile file = createFile("xxx.xml", "<xxx/>");
    assertFalse(getDomManager().isDomFile(file));

    new WriteCommandAction(getProject()) {
      @Override
      protected void run(@NotNull Result result) throws Throwable {
        ((XmlFile)file).getDocument().getRootTag().setAttribute("xmlns", "bar");
      }
    }.execute();

    assertTrue(getDomManager().isDomFile(file));
  }

  // A DTD public id matching the namespace policy must also trigger recognition.
  public void testCheckDtdPublicId() throws Throwable {
    getDomManager().registerFileDescription(new DomFileDescription<NamespacedElement>(NamespacedElement.class, "xxx", "bar"){
      @Override
      protected void initializeFileDescription() {
        registerNamespacePolicy("foo", "bar");
      }
    }, myDisposable);

    final PsiFile file = createFile("xxx.xml", "<xxx/>");
    assertFalse(getDomManager().isDomFile(file));

    new WriteCommandAction(getProject()) {
      @Override
      protected void run(@NotNull Result result) throws Throwable {
        final Document document = getDocument(file);
        document.insertString(0, "<!DOCTYPE xxx PUBLIC \"bar\" \"http://java.sun.com/dtd/ejb-jar_2_0.dtd\">\n");
        commitDocument(document);
      }
    }.execute();

    assertTrue(getDomManager().isDomFile(file));
  }

  // When a custom isMyFile() predicate stops matching, existing DOM elements
  // obtained from the file must become invalid.
  public void testChangeCustomDomness() throws Throwable {
    getDomManager().registerFileDescription(new DomFileDescription<MyElement>(MyElement.class, "xxx"){
      @Override
      public boolean isMyFile(@NotNull final XmlFile file, @Nullable final Module module) {
        return file.getText().contains("foo");
      }
    }, myDisposable);
    final XmlFile file = (XmlFile)createFile("xxx.xml", "<xxx zzz=\"foo\"><boy/><boy/><xxx/>");
    final MyElement boy = getDomManager().getFileElement(file, MyElement.class).getRootElement().getBoys().get(0);
    new WriteCommandAction(getProject()) {
      @Override
      protected void run(@NotNull Result result) throws Throwable {
        file.getDocument().getRootTag().setAttribute("zzz", "bar");
      }
    }.execute();
    assertFalse(getDomManager().isDomFile(file));
    assertFalse(boy.isValid());
  }

  // --- Fixture DOM interfaces used by the tests above ---

  public interface AbstractElement extends GenericDomValue<String> {
    GenericAttributeValue<String> getAttr();
  }

  public interface FooElement extends AbstractElement {
  }

  public interface BarElement extends AbstractElement {
  }

  public interface ZipElement extends AbstractElement {
  }

  public interface MyElement extends DomElement {
    MyElement getChild();

    List<MyElement> getBoys();
  }

  @Namespace("foo")
  public interface NamespacedElement extends DomElement {
  }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vfs.encoding;
import com.intellij.concurrency.ConcurrentCollectionFactory;
import com.intellij.ide.IdeBundle;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.TransactionGuard;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.components.State;
import com.intellij.openapi.components.Storage;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.impl.FileDocumentManagerImpl;
import com.intellij.openapi.fileTypes.FileTypeRegistry;
import com.intellij.openapi.fileTypes.InternalStdFileTypes;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectLocator;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.startup.StartupActivity;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileVisitor;
import com.intellij.openapi.vfs.impl.LightFilePointer;
import com.intellij.openapi.vfs.newvfs.events.VFileContentChangeEvent;
import com.intellij.openapi.vfs.newvfs.impl.VirtualFileSystemEntry;
import com.intellij.openapi.vfs.pointers.VirtualFilePointer;
import com.intellij.openapi.vfs.pointers.VirtualFilePointerManager;
import com.intellij.ui.GuiUtils;
import com.intellij.util.Processor;
import com.intellij.util.containers.ContainerUtil;
import consulo.disposer.Disposable;
import consulo.util.collection.HashingStrategy;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import org.jdom.Element;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;
@Singleton
@State(name = "Encoding", storages = @Storage("encodings.xml"))
public final class EncodingProjectManagerImpl extends EncodingProjectManager implements PersistentStateComponent<Element>, Disposable {
private static final String PROJECT_URL = "PROJECT";
private final Project myProject;
private final EncodingManagerImpl myIdeEncodingManager;
private boolean myNative2AsciiForPropertiesFiles;
private Charset myDefaultCharsetForPropertiesFiles;
@Nullable
private Charset myDefaultConsoleCharset;
private final SimpleModificationTracker myModificationTracker = new SimpleModificationTracker();
private BOMForNewUTF8Files myBomForNewUtf8Files = BOMForNewUTF8Files.NEVER;
private final Map<VirtualFilePointer, Charset> myMapping = ConcurrentCollectionFactory.createMap(new HashingStrategy<VirtualFilePointer>() {
@Override
public int hashCode(VirtualFilePointer pointer) {
// TODO !! hashCode is unstable - VirtualFilePointer URL can change
return FileUtil.PATH_HASHING_STRATEGY.hashCode(pointer.getUrl());
}
@Override
public boolean equals(VirtualFilePointer o1, VirtualFilePointer o2) {
return FileUtil.PATH_HASHING_STRATEGY.equals(o1.getUrl(), o2.getUrl());
}
});
private volatile Charset myProjectCharset;
@Inject
public EncodingProjectManagerImpl(@Nonnull Project project) {
myProject = project;
myIdeEncodingManager = (EncodingManagerImpl)EncodingManager.getInstance();
}
static final class EncodingProjectManagerStartUpActivity implements StartupActivity.DumbAware {
@Override
public void runActivity(@Nonnull Project project) {
GuiUtils.invokeLaterIfNeeded(() -> ((EncodingProjectManagerImpl)getInstance(project)).reloadAlreadyLoadedDocuments(), ModalityState.NON_MODAL, project.getDisposed());
}
}
@Override
public void dispose() {
}
@Override
public Element getState() {
Element element = new Element("x");
if (!myMapping.isEmpty()) {
List<Map.Entry<VirtualFilePointer, Charset>> mappings = new ArrayList<>(myMapping.entrySet());
ContainerUtil.quickSort(mappings, Comparator.comparing(e -> e.getKey().getUrl()));
for (Map.Entry<VirtualFilePointer, Charset> mapping : mappings) {
VirtualFilePointer file = mapping.getKey();
Charset charset = mapping.getValue();
Element child = new Element("file");
element.addContent(child);
child.setAttribute("url", file.getUrl());
child.setAttribute("charset", charset.name());
}
}
if (myProjectCharset != null) {
Element child = new Element("file");
element.addContent(child);
child.setAttribute("url", PROJECT_URL);
child.setAttribute("charset", myProjectCharset.name());
}
if (myNative2AsciiForPropertiesFiles) {
element.setAttribute("native2AsciiForPropertiesFiles", Boolean.toString(true));
}
if (myDefaultCharsetForPropertiesFiles != null) {
element.setAttribute("defaultCharsetForPropertiesFiles", myDefaultCharsetForPropertiesFiles.name());
}
if (myDefaultConsoleCharset != null) {
element.setAttribute("defaultCharsetForConsole", myDefaultConsoleCharset.name());
}
if (myBomForNewUtf8Files != BOMForNewUTF8Files.NEVER) {
element.setAttribute("addBOMForNewFiles", myBomForNewUtf8Files.name());
}
return element;
}
@Override
public void loadState(@Nonnull Element element) {
myMapping.clear();
List<Element> files = element.getChildren("file");
if (!files.isEmpty()) {
Map<VirtualFilePointer, Charset> mapping = new HashMap<>();
for (Element fileElement : files) {
String url = fileElement.getAttributeValue("url");
String charsetName = fileElement.getAttributeValue("charset");
Charset charset = CharsetToolkit.forName(charsetName);
if (charset == null) {
continue;
}
if (PROJECT_URL.equals(url)) {
myProjectCharset = charset;
}
else if (url != null) {
VirtualFilePointer file = VirtualFilePointerManager.getInstance().create(url, this, null);
mapping.put(file, charset);
}
}
myMapping.putAll(mapping);
}
myNative2AsciiForPropertiesFiles = Boolean.parseBoolean(element.getAttributeValue("native2AsciiForPropertiesFiles"));
myDefaultCharsetForPropertiesFiles = CharsetToolkit.forName(element.getAttributeValue("defaultCharsetForPropertiesFiles"));
myDefaultConsoleCharset = CharsetToolkit.forName(element.getAttributeValue("defaultCharsetForConsole"));
myBomForNewUtf8Files = BOMForNewUTF8Files.getByNameOrDefault(element.getAttributeValue("addBOMForNewFiles"));
myModificationTracker.incModificationCount();
}
private void reloadAlreadyLoadedDocuments() {
if (myMapping.isEmpty()) {
return;
}
FileDocumentManagerImpl fileDocumentManager = (FileDocumentManagerImpl)FileDocumentManager.getInstance();
for (VirtualFilePointer pointer : myMapping.keySet()) {
VirtualFile file = pointer.getFile();
Document cachedDocument = file == null ? null : fileDocumentManager.getCachedDocument(file);
if (cachedDocument != null) {
// reload document in the right encoding if someone sneaky (you, BreakpointManager) managed to load the document before project opened
reload(file, myProject, fileDocumentManager);
}
}
}
@Override
@Nullable
public Charset getEncoding(@Nullable VirtualFile virtualFile, boolean useParentDefaults) {
VirtualFile parent = virtualFile;
while (parent != null) {
Charset charset = myMapping.get(new LightFilePointer(parent.getUrl()));
if (charset != null || !useParentDefaults) return charset;
parent = parent.getParent();
}
return getDefaultCharset();
}
@Nonnull
public ModificationTracker getModificationTracker() {
return myModificationTracker;
}
@Override
public void setEncoding(@Nullable final VirtualFile virtualFileOrDir, @Nullable final Charset charset) {
Charset oldCharset;
if (virtualFileOrDir == null) {
oldCharset = myProjectCharset;
myProjectCharset = charset;
}
else {
VirtualFilePointer pointer = VirtualFilePointerManager.getInstance().create(virtualFileOrDir, this, null);
if (charset == null) {
oldCharset = myMapping.remove(pointer);
}
else {
oldCharset = myMapping.put(pointer, charset);
}
}
if (!Comparing.equal(oldCharset, charset) || virtualFileOrDir != null && !Comparing.equal(virtualFileOrDir.getCharset(), charset)) {
myModificationTracker.incModificationCount();
if (virtualFileOrDir != null) {
virtualFileOrDir.setCharset(virtualFileOrDir.getBOM() == null ? charset : null);
}
reloadAllFilesUnder(virtualFileOrDir);
}
}
private static void clearAndReload(@Nonnull VirtualFile virtualFileOrDir, @Nonnull Project project) {
virtualFileOrDir.setCharset(null);
reload(virtualFileOrDir, project, (FileDocumentManagerImpl)FileDocumentManager.getInstance());
}
private static void reload(@Nonnull VirtualFile virtualFile, @Nonnull Project project, @Nonnull FileDocumentManagerImpl documentManager) {
ApplicationManager.getApplication().runWriteAction(() -> {
ProjectLocator.computeWithPreferredProject(virtualFile, project, () -> {
documentManager.contentsChanged(new VFileContentChangeEvent(null, virtualFile, 0, 0, false));
return null;
});
});
}
@Override
@Nonnull
public Collection<Charset> getFavorites() {
Set<Charset> result = widelyKnownCharsets();
result.addAll(myMapping.values());
result.add(getDefaultCharset());
return result;
}
@Nonnull
static Set<Charset> widelyKnownCharsets() {
Set<Charset> result = new HashSet<>();
result.add(StandardCharsets.UTF_8);
result.add(CharsetToolkit.getDefaultSystemCharset());
result.add(CharsetToolkit.getPlatformCharset());
result.add(StandardCharsets.UTF_16);
result.add(StandardCharsets.ISO_8859_1);
result.add(StandardCharsets.US_ASCII);
result.add(EncodingManager.getInstance().getDefaultCharset());
result.add(EncodingManager.getInstance().getDefaultCharsetForPropertiesFiles(null));
result.remove(null);
return result;
}
/**
* @return readonly map of current mappings. to modify mappings use {@link #setMapping(Map)}
*/
@Nonnull
public Map<? extends VirtualFile, ? extends Charset> getAllMappings() {
return myMapping.entrySet().stream().map(e -> Pair.create(e.getKey().getFile(), e.getValue())).filter(e -> e.getFirst() != null)
.collect(Collectors.toMap(p -> p.getFirst(), p -> p.getSecond(), (c1, c2) -> c1));
}
public void setMapping(@Nonnull Map<? extends VirtualFile, ? extends Charset> mapping) {
ApplicationManager.getApplication().assertIsWriteThread();
FileDocumentManager.getInstance().saveAllDocuments(); // consider all files as unmodified
final Map<VirtualFilePointer, Charset> newMap = new HashMap<>(mapping.size());
final Map<VirtualFilePointer, Charset> oldMap = new HashMap<>(myMapping);
// ChangeFileEncodingAction should not start progress "reload files..."
suppressReloadDuring(() -> {
ProjectFileIndex fileIndex = ProjectRootManager.getInstance(myProject).getFileIndex();
for (Map.Entry<? extends VirtualFile, ? extends Charset> entry : mapping.entrySet()) {
VirtualFile virtualFile = entry.getKey();
Charset charset = entry.getValue();
if (charset == null) throw new IllegalArgumentException("Null charset for " + virtualFile + "; mapping: " + mapping);
if (virtualFile == null) {
myProjectCharset = charset;
}
else {
if (!fileIndex.isInContent(virtualFile)) continue;
VirtualFilePointer pointer = VirtualFilePointerManager.getInstance().create(virtualFile, this, null);
if (!virtualFile.isDirectory() && !Comparing.equal(charset, oldMap.get(pointer))) {
Document document;
byte[] bytes;
try {
document = FileDocumentManager.getInstance().getDocument(virtualFile);
if (document == null) throw new IOException();
bytes = virtualFile.contentsToByteArray();
}
catch (IOException e) {
continue;
}
// ask whether to reload/convert when in doubt
boolean changed = new ChangeFileEncodingAction().chosen(document, null, virtualFile, bytes, charset);
if (!changed) continue;
}
newMap.put(pointer, charset);
}
}
});
myMapping.clear();
myMapping.putAll(newMap);
final Set<VirtualFilePointer> changed = new HashSet<>(oldMap.keySet());
for (Map.Entry<VirtualFilePointer, Charset> entry : newMap.entrySet()) {
VirtualFilePointer file = entry.getKey();
Charset charset = entry.getValue();
Charset oldCharset = oldMap.get(file);
if (Comparing.equal(oldCharset, charset)) {
changed.remove(file);
}
}
Set<VirtualFilePointer> added = new HashSet<>(newMap.keySet());
added.removeAll(oldMap.keySet());
Set<VirtualFilePointer> removed = new HashSet<>(oldMap.keySet());
removed.removeAll(newMap.keySet());
changed.addAll(added);
changed.addAll(removed);
changed.remove(null);
if (!changed.isEmpty()) {
Processor<VirtualFile> reloadProcessor = createChangeCharsetProcessor(myProject);
tryStartReloadWithProgress(() -> {
Set<VirtualFile> processed = new HashSet<>();
next:
for (VirtualFilePointer changedFilePointer : changed) {
VirtualFile changedFile = changedFilePointer.getFile();
if (changedFile == null) continue;
for (VirtualFile processedFile : processed) {
if (VfsUtilCore.isAncestor(processedFile, changedFile, false)) continue next;
}
processSubFiles(changedFile, reloadProcessor);
processed.add(changedFile);
}
});
}
myModificationTracker.incModificationCount();
}
@Nonnull
private static Processor<VirtualFile> createChangeCharsetProcessor(@Nonnull Project project) {
return file -> {
if (file.isDirectory()) {
return true;
}
if (!(file instanceof VirtualFileSystemEntry)) return false;
Document cachedDocument = FileDocumentManager.getInstance().getCachedDocument(file);
if (cachedDocument == null) {
if (file.isCharsetSet()) {
file.setCharset(null, null, false);
}
return true;
}
ProgressManager.progress(IdeBundle.message("progress.text.reloading.files"), file.getPresentableUrl());
TransactionGuard.submitTransaction(ApplicationManager.getApplication(), () -> clearAndReload(file, project));
return true;
};
}
/**
* @param file null means all in the project
* @param processor
* @return
*/
private boolean processSubFiles(@Nullable VirtualFile file, @Nonnull final Processor<? super VirtualFile> processor) {
if (file == null) {
for (VirtualFile virtualFile : ProjectRootManager.getInstance(myProject).getContentRoots()) {
if (!processSubFiles(virtualFile, processor)) return false;
}
return true;
}
return VirtualFileVisitor.CONTINUE == VfsUtilCore.visitChildrenRecursively(file, new VirtualFileVisitor<Void>() {
@Override
public boolean visitFile(@Nonnull final VirtualFile file) {
return processor.process(file);
}
});
}
//retrieves encoding for the Project node
@Override
@Nonnull
public Charset getDefaultCharset() {
Charset charset = myProjectCharset;
// if the project charset was not specified, use the IDE encoding, save this back
return charset == null ? myIdeEncodingManager.getDefaultCharset() : charset;
}
@Nullable
public Charset getConfiguredDefaultCharset() {
return myProjectCharset;
}
private static final ThreadLocal<Boolean> SUPPRESS_RELOAD = new ThreadLocal<>();
static void suppressReloadDuring(@Nonnull Runnable action) {
Boolean old = SUPPRESS_RELOAD.get();
try {
SUPPRESS_RELOAD.set(Boolean.TRUE);
action.run();
}
finally {
SUPPRESS_RELOAD.set(old);
}
}
private void tryStartReloadWithProgress(@Nonnull final Runnable reloadAction) {
Boolean suppress = SUPPRESS_RELOAD.get();
if (suppress == Boolean.TRUE) return;
FileDocumentManager.getInstance().saveAllDocuments(); // consider all files as unmodified
ProgressManager.getInstance().runProcessWithProgressSynchronously(() -> suppressReloadDuring(reloadAction), IdeBundle.message("progress.title.reload.files"), false, myProject);
}
private void reloadAllFilesUnder(@Nullable final VirtualFile root) {
tryStartReloadWithProgress(() -> processSubFiles(root, file -> {
if (!(file instanceof VirtualFileSystemEntry)) return true;
Document cachedDocument = FileDocumentManager.getInstance().getCachedDocument(file);
if (cachedDocument != null) {
ProgressManager.progress(IdeBundle.message("progress.text.reloading.file"), file.getPresentableUrl());
TransactionGuard.submitTransaction(myProject, () -> reload(file, myProject, (FileDocumentManagerImpl)FileDocumentManager.getInstance()));
}
// for not loaded files deep under project, reset encoding to give them chance re-detect the right one later
else if (file.isCharsetSet() && !file.equals(root)) {
file.setCharset(null);
}
return true;
}));
}
@Override
public boolean isNative2Ascii(@Nonnull final VirtualFile virtualFile) {
return FileTypeRegistry.getInstance().isFileOfType(virtualFile, InternalStdFileTypes.PROPERTIES) && myNative2AsciiForPropertiesFiles;
}
@Override
public boolean isNative2AsciiForPropertiesFiles() {
return myNative2AsciiForPropertiesFiles;
}
@Override
public void setNative2AsciiForPropertiesFiles(final VirtualFile virtualFile, final boolean native2Ascii) {
if (myNative2AsciiForPropertiesFiles != native2Ascii) {
myNative2AsciiForPropertiesFiles = native2Ascii;
EncodingManagerImpl.firePropertyChange(null, PROP_NATIVE2ASCII_SWITCH, !native2Ascii, native2Ascii, myProject);
}
}
@Nonnull // empty means system default
@Override
public String getDefaultCharsetName() {
Charset charset = getEncoding(null, false);
return charset == null ? "" : charset.name();
}
@Override
public void setDefaultCharsetName(@Nonnull String name) {
setEncoding(null, name.isEmpty() ? null : CharsetToolkit.forName(name));
}
@Override
@Nullable
public Charset getDefaultCharsetForPropertiesFiles(@Nullable final VirtualFile virtualFile) {
return myDefaultCharsetForPropertiesFiles;
}
@Override
public void setDefaultCharsetForPropertiesFiles(@Nullable final VirtualFile virtualFile, @Nullable Charset charset) {
Charset old = myDefaultCharsetForPropertiesFiles;
if (!Comparing.equal(old, charset)) {
myDefaultCharsetForPropertiesFiles = charset;
EncodingManagerImpl.firePropertyChange(null, PROP_PROPERTIES_FILES_ENCODING, old, charset, myProject);
}
}
@Override
@Nonnull
public Charset getDefaultConsoleEncoding() {
return myIdeEncodingManager.getDefaultConsoleEncoding();
}
@Override
@Nullable
public Charset getCachedCharsetFromContent(@Nonnull Document document) {
return myIdeEncodingManager.getCachedCharsetFromContent(document);
}
public enum BOMForNewUTF8Files {
ALWAYS("with BOM"),
NEVER("with NO BOM"),
WINDOWS_ONLY("with BOM under Windows, with no BOM otherwise");
private final String name;
BOMForNewUTF8Files(@Nonnull String name) {
this.name = name;
}
@Override
public String toString() {
return name;
}
@Nonnull
private static BOMForNewUTF8Files getByNameOrDefault(@Nullable String name) {
if (!StringUtil.isEmpty(name)) {
for (BOMForNewUTF8Files value : values()) {
if (value.name().equalsIgnoreCase(name)) {
return value;
}
}
}
return NEVER;
}
}
/** Sets the BOM policy applied when new UTF-8 files are created. */
public void setBOMForNewUtf8Files(@Nonnull BOMForNewUTF8Files option) {
  myBomForNewUtf8Files = option;
}
/** Returns the currently configured BOM policy for newly created UTF-8 files. */
@Nonnull
BOMForNewUTF8Files getBOMForNewUTF8Files() {
  return myBomForNewUtf8Files;
}
@Override
public boolean shouldAddBOMForNewUtf8File() {
  // Translate the configured policy into a concrete yes/no for this platform.
  final BOMForNewUTF8Files option = myBomForNewUtf8Files;
  if (option == BOMForNewUTF8Files.ALWAYS) {
    return true;
  }
  if (option == BOMForNewUTF8Files.NEVER) {
    return false;
  }
  if (option == BOMForNewUTF8Files.WINDOWS_ONLY) {
    return SystemInfo.isWindows;
  }
  // Defensive: only reachable if a new enum constant is added without updating this method.
  throw new IllegalStateException(option.toString());
}
}
| |
// SECTION-START[License Header]
// <editor-fold defaultstate="collapsed" desc=" Generated License ">
/*
* Java Object Management and Configuration
* Copyright (C) Christian Schulte, 2005-206
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* o Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* $JOMC$
*
*/
// </editor-fold>
// SECTION-END
package org.jomc.sequences.util;
import java.beans.Beans;
import java.beans.ExceptionListener;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.io.Serializable;
import java.math.BigInteger;
import java.util.LinkedList;
import java.util.List;
import javax.swing.event.SwingPropertyChangeSupport;
import javax.swing.table.AbstractTableModel;
import org.jomc.sequences.Sequence;
import org.jomc.sequences.SequencesException;
import org.jomc.sequences.SequencesSystemException;
// SECTION-START[Documentation]
// <editor-fold defaultstate="collapsed" desc=" Generated Documentation ">
/**
* Swing TableModel Java Bean for displaying and editing a system's SequenceDirectory.
*
* <dl>
* <dt><b>Identifier:</b></dt><dd>org.jomc.sequences.util.SequencesTableModel</dd>
* <dt><b>Name:</b></dt><dd>JOMC Sequences Utilities</dd>
* <dt><b>Specifications:</b></dt>
* <dd>javax.swing.table.TableModel</dd>
* <dt><b>Abstract:</b></dt><dd>No</dd>
* <dt><b>Final:</b></dt><dd>No</dd>
* <dt><b>Stateless:</b></dt><dd>No</dd>
* </dl>
*
* @author <a href="mailto:schulte2005@users.sourceforge.net">Christian Schulte</a> 1.0
* @version 1.0-beta-3-SNAPSHOT
*/
// </editor-fold>
// SECTION-END
// SECTION-START[Annotations]
// <editor-fold defaultstate="collapsed" desc=" Generated Annotations ">
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
// </editor-fold>
// SECTION-END
public class SequencesTableModel extends AbstractTableModel implements Serializable
{
// SECTION-START[TableModel]
public int getRowCount()
{
    // One row per sequence; system failures are reported to listeners and the
    // table is presented as empty.
    try
    {
        return this.getSequences().size();
    }
    catch ( final SequencesSystemException e )
    {
        this.fireExceptionThrown( e );
        return 0;
    }
}
public int getColumnCount()
{
    // Fixed layout: name, minimum, maximum, increment and value columns.
    return DEFAULT_COLUMN_COUNT;
}
@Override
public String getColumnName( final int columnIndex )
{
    // Column titles are localized messages; unknown indices are logged and fall
    // back to the spreadsheet-style name supplied by AbstractTableModel.
    final java.util.Locale locale = this.getLocale();

    switch ( columnIndex )
    {
        case NAME_COLUMN_INDEX:
            return this.getNameColumnTitle( locale );

        case MINIMUM_COLUMN_INDEX:
            return this.getMinimumColumnTitle( locale );

        case MAXIMUM_COLUMN_INDEX:
            return this.getMaximumColumnTitle( locale );

        case INCREMENT_COLUMN_INDEX:
            return this.getIncrementColumnTitle( locale );

        case VALUE_COLUMN_INDEX:
            return this.getValueColumnTitle( locale );

        default:
            this.getLogger().warn( this.getIllegalColumnIndexMessage( locale, columnIndex ) );
            return super.getColumnName( columnIndex );
    }
}
@Override
public Class<?> getColumnClass( final int columnIndex )
{
    // All numeric columns report BigInteger; the name column is plain text.
    // Unknown indices are logged and deferred to the superclass (Object.class).
    switch ( columnIndex )
    {
        case NAME_COLUMN_INDEX:
            return String.class;

        case MINIMUM_COLUMN_INDEX:
        case MAXIMUM_COLUMN_INDEX:
        case INCREMENT_COLUMN_INDEX:
        case VALUE_COLUMN_INDEX:
            return BigInteger.class;

        default:
            this.getLogger().warn( this.getIllegalColumnIndexMessage( this.getLocale(), columnIndex ) );
            return super.getColumnClass( columnIndex );
    }
}
@Override
public boolean isCellEditable( final int rowIndex, final int columnIndex )
{
    // Editability is a per-column flag; the row index does not matter for the
    // known columns. Unknown indices are logged and deferred to the superclass.
    switch ( columnIndex )
    {
        case NAME_COLUMN_INDEX:
            return this.getNameColumnEditable();

        case MINIMUM_COLUMN_INDEX:
            return this.getMinimumColumnEditable();

        case MAXIMUM_COLUMN_INDEX:
            return this.getMaximumColumnEditable();

        case INCREMENT_COLUMN_INDEX:
            return this.getIncrementColumnEditable();

        case VALUE_COLUMN_INDEX:
            return this.getValueColumnEditable();

        default:
            this.getLogger().warn( this.getIllegalColumnIndexMessage( this.getLocale(), columnIndex ) );
            return super.isCellEditable( rowIndex, columnIndex );
    }
}
public Object getValueAt( final int rowIndex, final int columnIndex )
{
    // Maps a table cell to the corresponding property of the sequence in that
    // row. Recoverable failures are reported via fireExceptionThrown and the
    // cell is rendered empty (null).
    try
    {
        final Sequence sequence = this.getSequences().get( rowIndex );

        switch ( columnIndex )
        {
            case NAME_COLUMN_INDEX:
                return sequence.getName();

            case MINIMUM_COLUMN_INDEX:
                return sequence.getMinimum();

            case MAXIMUM_COLUMN_INDEX:
                return sequence.getMaximum();

            case INCREMENT_COLUMN_INDEX:
                return sequence.getIncrement();

            case VALUE_COLUMN_INDEX:
                return sequence.getValue();

            default:
                this.getLogger().warn( this.getIllegalColumnIndexMessage( this.getLocale(), columnIndex ) );
                return null;
        }
    }
    catch ( final SequencesSystemException e )
    {
        this.fireExceptionThrown( e );
    }
    catch ( final IndexOutOfBoundsException e )
    {
        this.fireExceptionThrown( e );
    }

    return null;
}
@Override
public void setValueAt( final Object aValue, final int rowIndex, final int columnIndex )
{
    // Applies an edited cell value to the sequence at rowIndex and persists the
    // change through the sequence directory. On failure the cached sequence list
    // is discarded and the whole table is refreshed from the directory.
    try
    {
        final Sequence sequence = this.getSequences().get( rowIndex );

        // Captured before mutating: the directory locates the persistent record
        // by its original name and revision when editing.
        final String name = sequence.getName();
        final long revision = sequence.getRevision();

        switch ( columnIndex )
        {
            case NAME_COLUMN_INDEX:
                sequence.setName( aValue.toString() );
                break;

            case MINIMUM_COLUMN_INDEX:
                // Accept any Number: cell editors deliver BigInteger (see
                // getColumnClass), while the former (Long) cast only worked for
                // Long values and threw ClassCastException otherwise.
                sequence.setMinimum( ( (Number) aValue ).longValue() );
                break;

            case MAXIMUM_COLUMN_INDEX:
                sequence.setMaximum( ( (Number) aValue ).longValue() );
                break;

            case INCREMENT_COLUMN_INDEX:
                sequence.setIncrement( ( (Number) aValue ).longValue() );
                break;

            case VALUE_COLUMN_INDEX:
                sequence.setValue( ( (Number) aValue ).longValue() );
                break;

            default:
                this.getLogger().warn( this.getIllegalColumnIndexMessage( this.getLocale(), columnIndex ) );
                break;
        }

        if ( !Beans.isDesignTime() )
        {
            this.getSequenceDirectory().editSequence( name, revision, sequence );
        }

        this.fireTableRowsUpdated( rowIndex, rowIndex );
    }
    catch ( final SequencesException e )
    {
        this.fireExceptionThrown( e );
        this.sequences = null;
        this.fireTableDataChanged();
    }
    catch ( final SequencesSystemException e )
    {
        this.fireExceptionThrown( e );
        this.sequences = null;
        this.fireTableDataChanged();
    }
    catch ( final IndexOutOfBoundsException e )
    {
        this.fireExceptionThrown( e );
        this.sequences = null;
        this.fireTableDataChanged();
    }
}
// SECTION-END
// SECTION-START[SequencesTableModel]
/** Index of the column displaying a sequence's name. */
public static final int NAME_COLUMN_INDEX = 0;

/** Index of the column displaying a sequence's minimum value. */
public static final int MINIMUM_COLUMN_INDEX = 1;

/** Index of the column displaying a sequence's maximum value. */
public static final int MAXIMUM_COLUMN_INDEX = 2;

/** Index of the column displaying a sequence's increment value. */
public static final int INCREMENT_COLUMN_INDEX = 3;

/** Index of the column displaying a sequence's value. */
public static final int VALUE_COLUMN_INDEX = 4;

/** Name of property {@code nameColumnEditable}. */
public static final String NAME_COLUMN_EDITABLE =
    "org.jomc.sequences.util.SequencesTableModel.NAME_COLUMN_EDITABLE";

/** Name of property {@code minimumColumnEditable}. */
public static final String MINIMUM_COLUMN_EDITABLE =
    "org.jomc.sequences.util.SequencesTableModel.MINIMUM_COLUMN_EDITABLE";

/** Name of property {@code maximumColumnEditable}. */
public static final String MAXIMUM_COLUMN_EDITABLE =
    "org.jomc.sequences.util.SequencesTableModel.MAXIMUM_COLUMN_EDITABLE";

/** Name of property {@code incrementColumnEditable}. */
public static final String INCREMENT_COLUMN_EDITABLE =
    "org.jomc.sequences.util.SequencesTableModel.INCREMENT_COLUMN_EDITABLE";

/** Name of property {@code valueColumnEditable}. */
public static final String VALUE_COLUMN_EDITABLE =
    "org.jomc.sequences.util.SequencesTableModel.VALUE_COLUMN_EDITABLE";

/** Name of property {@code sequenceFilter}. */
public static final String SEQUENCE_FILTER =
    "org.jomc.sequences.util.SequencesTableModel.SEQUENCE_FILTER";

/** Default number of table columns. */
private static final int DEFAULT_COLUMN_COUNT = 5;

// The editable flags below are tri-state: null means "not yet initialized";
// the corresponding getters lazily apply the configured defaults.

/**
 * Flag indicating that the {@code name} column is editable.
 * @serial
 */
private Boolean nameColumnEditable;

/**
 * Flag indicating that the {@code minimum} column is editable.
 * @serial
 */
private Boolean minimumColumnEditable;

/**
 * Flag indicating that the {@code maximum} column is editable.
 * @serial
 */
private Boolean maximumColumnEditable;

/**
 * Flag indicating that the {@code increment} column is editable.
 * @serial
 */
private Boolean incrementColumnEditable;

/**
 * Flag indicating that the {@code value} column is editable.
 * @serial
 */
private Boolean valueColumnEditable;

/**
 * Entity filter.
 * @serial
 */
private Sequence sequenceFilter;

/** Sequences of the model. Lazy cache of directory search results; {@code null} forces a re-fetch (see {@code getSequences()}). */
private transient List<Sequence> sequences;

/**
 * Change support.
 * @serial
 */
private PropertyChangeSupport changeSupport = new SwingPropertyChangeSupport( this );
/**
 * Gets the flag indicating that the {@code name} column is editable.
 *
 * @return {@code true} if the {@code name} column is editable; {@code false} if not.
 */
public final Boolean getNameColumnEditable()
{
    if ( this.nameColumnEditable != null )
    {
        return this.nameColumnEditable;
    }

    // Lazily initialized from the configured default; listeners are notified
    // of the transition from unset (null) to the default.
    this.nameColumnEditable = this.isNameColumnEditableByDefault();
    this.changeSupport.firePropertyChange( NAME_COLUMN_EDITABLE, null, this.nameColumnEditable );
    return this.nameColumnEditable;
}
/**
 * Sets the flag indicating that the {@code name} column is editable.
 *
 * @param value {@code true} if the {@code name} column should be editable; {@code false} if not.
 */
public final void setNameColumnEditable( final Boolean value )
{
    final Boolean previous = this.nameColumnEditable;
    this.nameColumnEditable = value;
    this.changeSupport.firePropertyChange( NAME_COLUMN_EDITABLE, previous, value );
}
/**
 * Gets the flag indicating that the {@code minimum} column is editable.
 *
 * @return {@code true} if the {@code minimum} column is editable; {@code false} if not.
 */
public final Boolean getMinimumColumnEditable()
{
    if ( this.minimumColumnEditable != null )
    {
        return this.minimumColumnEditable;
    }

    // Lazily initialized from the configured default; listeners are notified
    // of the transition from unset (null) to the default.
    this.minimumColumnEditable = this.isMinimumColumnEditableByDefault();
    this.changeSupport.firePropertyChange( MINIMUM_COLUMN_EDITABLE, null, this.minimumColumnEditable );
    return this.minimumColumnEditable;
}
/**
 * Sets the flag indicating that the {@code minimum} column is editable.
 *
 * @param value {@code true} if the {@code minimum} column should be editable; {@code false} if not.
 */
public final void setMinimumColumnEditable( final Boolean value )
{
    final Boolean previous = this.minimumColumnEditable;
    this.minimumColumnEditable = value;
    this.changeSupport.firePropertyChange( MINIMUM_COLUMN_EDITABLE, previous, value );
}
/**
 * Gets the flag indicating that the {@code maximum} column is editable.
 *
 * @return {@code true} if the {@code maximum} column is editable; {@code false} if not.
 */
public final Boolean getMaximumColumnEditable()
{
    if ( this.maximumColumnEditable != null )
    {
        return this.maximumColumnEditable;
    }

    // Lazily initialized from the configured default; listeners are notified
    // of the transition from unset (null) to the default.
    this.maximumColumnEditable = this.isMaximumColumnEditableByDefault();
    this.changeSupport.firePropertyChange( MAXIMUM_COLUMN_EDITABLE, null, this.maximumColumnEditable );
    return this.maximumColumnEditable;
}
/**
 * Sets the flag indicating that the {@code maximum} column is editable.
 *
 * @param value {@code true} if the {@code maximum} column should be editable; {@code false} if not.
 */
public final void setMaximumColumnEditable( final Boolean value )
{
    final Boolean previous = this.maximumColumnEditable;
    this.maximumColumnEditable = value;
    this.changeSupport.firePropertyChange( MAXIMUM_COLUMN_EDITABLE, previous, value );
}
/**
 * Gets the flag indicating that the {@code increment} column is editable.
 *
 * @return {@code true} if the {@code increment} column is editable; {@code false} if not.
 */
public final Boolean getIncrementColumnEditable()
{
    if ( this.incrementColumnEditable != null )
    {
        return this.incrementColumnEditable;
    }

    // Lazily initialized from the configured default; listeners are notified
    // of the transition from unset (null) to the default.
    this.incrementColumnEditable = this.isIncrementColumnEditableByDefault();
    this.changeSupport.firePropertyChange( INCREMENT_COLUMN_EDITABLE, null, this.incrementColumnEditable );
    return this.incrementColumnEditable;
}
/**
 * Sets the flag indicating that the {@code increment} column is editable.
 *
 * @param value {@code true} if the {@code increment} column should be editable; {@code false} if not.
 */
public final void setIncrementColumnEditable( final Boolean value )
{
    final Boolean previous = this.incrementColumnEditable;
    this.incrementColumnEditable = value;
    this.changeSupport.firePropertyChange( INCREMENT_COLUMN_EDITABLE, previous, value );
}
/**
 * Gets the flag indicating that the {@code value} column is editable.
 *
 * @return {@code true} if the {@code value} column is editable; {@code false} if not.
 */
public final Boolean getValueColumnEditable()
{
    if ( this.valueColumnEditable != null )
    {
        return this.valueColumnEditable;
    }

    // Lazily initialized from the configured default; listeners are notified
    // of the transition from unset (null) to the default.
    this.valueColumnEditable = this.isValueColumnEditableByDefault();
    this.changeSupport.firePropertyChange( VALUE_COLUMN_EDITABLE, null, this.valueColumnEditable );
    return this.valueColumnEditable;
}
/**
 * Sets the flag indicating that the {@code value} column is editable.
 *
 * @param value {@code true} if the {@code value} column should be editable; {@code false} if not.
 */
public final void setValueColumnEditable( final Boolean value )
{
    final Boolean previous = this.valueColumnEditable;
    this.valueColumnEditable = value;
    this.changeSupport.firePropertyChange( VALUE_COLUMN_EDITABLE, previous, value );
}
/**
 * Gets the entity used for filtering sequences.
 *
 * @return Entity used for filtering sequences or {@code null}.
 */
public final Sequence getSequenceFilter()
{
    // When null, getSequences() passes null to searchSequences()
    // (presumably: no name filter -- confirm against the directory API).
    return this.sequenceFilter;
}
/**
 * Sets the entity used for filtering sequences.
 *
 * @param value Entity to use for filtering sequences or {@code null}.
 */
public final void setSequenceFilter( final Sequence value )
{
    final Sequence previous = this.sequenceFilter;
    this.sequenceFilter = value;

    // Drop the cached sequence list and repaint before property change
    // listeners hear about the new filter.
    this.sequences = null;
    this.fireTableDataChanged();
    this.changeSupport.firePropertyChange( SEQUENCE_FILTER, previous, value );
}
// Property change plumbing: each method below is a straight delegation to the
// SwingPropertyChangeSupport instance held in changeSupport.

/**
 * Add a {@code PropertyChangeListener} to the listener list.
 * <p>The listener is registered for all properties. The same listener object may be added more than once, and will
 * be called as many times as it is added. If {@code listener} is {@code null}, no exception is thrown and no action
 * is taken.</p>
 *
 * @param listener The listener to be added.
 */
public final void addPropertyChangeListener( final PropertyChangeListener listener )
{
    this.changeSupport.addPropertyChangeListener( listener );
}

/**
 * Removes a {@code PropertyChangeListener} from the listener list.
 * <p>This removes a {@code PropertyChangeListener} that was registered for all properties. If {@code listener} was
 * added more than once, it will be notified one less time after being removed. If {@code listener} is {@code null},
 * or was never added, no exception is thrown and no action is taken.</p>
 *
 * @param listener The listener to be removed.
 */
public final void removePropertyChangeListener( final PropertyChangeListener listener )
{
    this.changeSupport.removePropertyChangeListener( listener );
}

/**
 * Gets an array of all the listeners that were added to the instance.
 * <p>If some listeners have been added with a named property, then the returned array will be a mixture of
 * {@code PropertyChangeListeners} and {@code PropertyChangeListenerProxy}s. If the calling method is interested in
 * distinguishing the listeners then it must test each element to see if it's a {@code PropertyChangeListenerProxy},
 * perform the cast, and examine the parameter.</p>
 *
 * @return All of the {@code PropertyChangeListeners} added or an empty array if no listeners have been added.
 *
 * @see PropertyChangeSupport#getPropertyChangeListeners()
 */
public final PropertyChangeListener[] getPropertyChangeListeners()
{
    return this.changeSupport.getPropertyChangeListeners();
}

/**
 * Add a {@code PropertyChangeListener} for a specific property.
 * <p>The listener will be invoked only when an event for that specific property occurs. The same listener object
 * may be added more than once. For each property, the listener will be invoked the number of times it was added for
 * that property. If {@code propertyName} or {@code listener} is {@code null}, no exception is thrown and no action
 * is taken.</p>
 *
 * @param propertyName The name of the property to listen on.
 * @param listener The listener to be added.
 */
public final void addPropertyChangeListener( final String propertyName, final PropertyChangeListener listener )
{
    this.changeSupport.addPropertyChangeListener( propertyName, listener );
}

/**
 * Removes a {@code PropertyChangeListener} for a specific property.
 * <p>If {@code listener} was added more than once to the instance for the specified property, it will be notified
 * one less time after being removed. If {@code propertyName} is {@code null}, no exception is thrown and no action
 * is taken. If {@code listener} is {@code null}, or was never added for the specified property, no exception is
 * thrown and no action is taken.</p>
 *
 * @param propertyName The name of the property that was listened on.
 * @param listener The listener to be removed.
 */
public final void removePropertyChangeListener( final String propertyName, final PropertyChangeListener listener )
{
    this.changeSupport.removePropertyChangeListener( propertyName, listener );
}

/**
 * Gets an array of all the listeners which have been associated with the named property.
 *
 * @param propertyName The name of the property being listened to.
 *
 * @return All of the {@code PropertyChangeListeners} associated with the named property. If no such listeners have
 * been added, or if {@code propertyName} is {@code null}, an empty array is returned.
 */
public final PropertyChangeListener[] getPropertyChangeListeners( final String propertyName )
{
    return this.changeSupport.getPropertyChangeListeners( propertyName );
}
/**
 * Gets the entities of the model.
 * <p>This accessor method returns a reference to the live list, not a snapshot. Therefore any modification you make
 * to the returned list will be present inside the object.</p>
 *
 * @return The entities of the model.
 *
 * @throws SequencesSystemException if searching entities fails.
 */
protected List<Sequence> getSequences() throws SequencesSystemException
{
    if ( this.sequences == null )
    {
        // The cache field is assigned before the directory search so that a
        // failing search leaves an empty (not null) list behind, matching the
        // established caching behaviour.
        final List<Sequence> cache = new LinkedList<Sequence>();
        this.sequences = cache;

        if ( !Beans.isDesignTime() )
        {
            final Sequence filter = this.getSequenceFilter();
            cache.addAll( this.getSequenceDirectory().searchSequences( filter != null ? filter.getName() : null ) );
        }
    }

    return this.sequences;
}
/**
 * Notifies any available {@code ExceptionListener} whenever a recoverable exception has been caught.
 *
 * @param e The exception that was caught.
 */
protected void fireExceptionThrown( final Exception e )
{
    this.getLogger().error( e );

    final ExceptionListener[] listeners = this.getExceptionListener();

    if ( listeners != null )
    {
        for ( int i = 0; i < listeners.length; i++ )
        {
            listeners[i].exceptionThrown( e );
        }
    }
}
// SECTION-END
// SECTION-START[Constructors]
// <editor-fold defaultstate="collapsed" desc=" Generated Constructors ">
/** Creates a new {@code SequencesTableModel} instance. */
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
public SequencesTableModel()
{
    // SECTION-START[Default Constructor]
    // No eager initialization: all model state is lazily initialized on first use.
    super();
    // SECTION-END
}
// </editor-fold>
// SECTION-END
// SECTION-START[Dependencies]
// <editor-fold defaultstate="collapsed" desc=" Generated Dependencies ">
// NOTE(review): generated dependency accessors (JOMC SourceFileProcessor) --
// resolved through the JOMC ObjectManager at call time; regenerate from the
// module descriptor instead of hand-editing.

/**
 * Gets the {@code <ExceptionListener>} dependency.
 * <p>
 * This method returns any available object of the {@code <JOMC :: Sequences :: ExceptionListener>} specification at specification level 1.0.
 * That specification does not apply to any scope. A new object is returned whenever requested and bound to this instance.
 * </p>
 * <dl>
 * <dt><b>Final:</b></dt><dd>No</dd>
 * </dl>
 * @return The {@code <ExceptionListener>} dependency.
 * {@code null} if no object is available.
 * @throws org.jomc.ObjectManagementException if getting the dependency instance fails.
 */
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
private java.beans.ExceptionListener[] getExceptionListener()
{
    // May return null -- callers (see fireExceptionThrown) must check.
    return (java.beans.ExceptionListener[]) org.jomc.ObjectManagerFactory.getObjectManager( this.getClass().getClassLoader() ).getDependency( this, "ExceptionListener" );
}

/**
 * Gets the {@code <Locale>} dependency.
 * <p>
 * This method returns the {@code <default>} object of the {@code <java.util.Locale>} specification at specification level 1.1.
 * That specification does not apply to any scope. A new object is returned whenever requested and bound to this instance.
 * </p>
 * <dl>
 * <dt><b>Final:</b></dt><dd>No</dd>
 * </dl>
 * @return The {@code <Locale>} dependency.
 * @throws org.jomc.ObjectManagementException if getting the dependency instance fails.
 */
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
private java.util.Locale getLocale()
{
    final java.util.Locale _d = (java.util.Locale) org.jomc.ObjectManagerFactory.getObjectManager( this.getClass().getClassLoader() ).getDependency( this, "Locale" );
    assert _d != null : "'Locale' dependency not found.";
    return _d;
}

/**
 * Gets the {@code <Logger>} dependency.
 * <p>
 * This method returns any available object of the {@code <org.jomc.logging.Logger>} specification at specification level 1.0.
 * That specification does not apply to any scope. A new object is returned whenever requested and bound to this instance.
 * </p>
 * <p><strong>Properties:</strong>
 * <table border="1" width="100%" cellpadding="3" cellspacing="0">
 * <tr class="TableSubHeadingColor">
 * <th align="left" scope="col" nowrap><b>Name</b></th>
 * <th align="left" scope="col" nowrap><b>Type</b></th>
 * <th align="left" scope="col" nowrap><b>Documentation</b></th>
 * </tr>
 * <tr class="TableRow">
 * <td align="left" valign="top" nowrap>{@code <name>}</td>
 * <td align="left" valign="top" nowrap>{@code java.lang.String}</td>
 * <td align="left" valign="top"></td>
 * </tr>
 * </table>
 * </p>
 * <dl>
 * <dt><b>Final:</b></dt><dd>No</dd>
 * </dl>
 * @return The {@code <Logger>} dependency.
 * @throws org.jomc.ObjectManagementException if getting the dependency instance fails.
 */
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
private org.jomc.logging.Logger getLogger()
{
    final org.jomc.logging.Logger _d = (org.jomc.logging.Logger) org.jomc.ObjectManagerFactory.getObjectManager( this.getClass().getClassLoader() ).getDependency( this, "Logger" );
    assert _d != null : "'Logger' dependency not found.";
    return _d;
}

/**
 * Gets the {@code <SequenceDirectory>} dependency.
 * <p>
 * This method returns any available object of the {@code <org.jomc.sequences.SequenceDirectory>} specification at specification level 1.0.
 * That specification applies to {@code <Singleton>} scope. The singleton object is returned whenever requested and bound to this instance.
 * </p>
 * <dl>
 * <dt><b>Final:</b></dt><dd>No</dd>
 * </dl>
 * @return The {@code <SequenceDirectory>} dependency.
 * @throws org.jomc.ObjectManagementException if getting the dependency instance fails.
 */
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
private org.jomc.sequences.SequenceDirectory getSequenceDirectory()
{
    final org.jomc.sequences.SequenceDirectory _d = (org.jomc.sequences.SequenceDirectory) org.jomc.ObjectManagerFactory.getObjectManager( this.getClass().getClassLoader() ).getDependency( this, "SequenceDirectory" );
    assert _d != null : "'SequenceDirectory' dependency not found.";
    return _d;
}
// </editor-fold>
// SECTION-END
// SECTION-START[Properties]
// <editor-fold defaultstate="collapsed" desc=" Generated Properties ">
// NOTE(review): generated property accessors (JOMC SourceFileProcessor) --
// values come from the JOMC ObjectManager; regenerate from the module
// descriptor instead of hand-editing.

/**
 * Gets the value of the {@code <incrementColumnEditableByDefault>} property.
 * <p><dl>
 * <dt><b>Final:</b></dt><dd>No</dd>
 * </dl></p>
 * @return Flag indicating that the "increment" column is editable by default.
 * @throws org.jomc.ObjectManagementException if getting the property instance fails.
 */
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
private java.lang.Boolean isIncrementColumnEditableByDefault()
{
    final java.lang.Boolean _p = (java.lang.Boolean) org.jomc.ObjectManagerFactory.getObjectManager( this.getClass().getClassLoader() ).getProperty( this, "incrementColumnEditableByDefault" );
    assert _p != null : "'incrementColumnEditableByDefault' property not found.";
    return _p;
}

/**
 * Gets the value of the {@code <maximumColumnEditableByDefault>} property.
 * <p><dl>
 * <dt><b>Final:</b></dt><dd>No</dd>
 * </dl></p>
 * @return Flag indicating that the "maximum" column is editable by default.
 * @throws org.jomc.ObjectManagementException if getting the property instance fails.
 */
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
private java.lang.Boolean isMaximumColumnEditableByDefault()
{
    final java.lang.Boolean _p = (java.lang.Boolean) org.jomc.ObjectManagerFactory.getObjectManager( this.getClass().getClassLoader() ).getProperty( this, "maximumColumnEditableByDefault" );
    assert _p != null : "'maximumColumnEditableByDefault' property not found.";
    return _p;
}

/**
 * Gets the value of the {@code <minimumColumnEditableByDefault>} property.
 * <p><dl>
 * <dt><b>Final:</b></dt><dd>No</dd>
 * </dl></p>
 * @return Flag indicating that the "minimum" column is editable by default.
 * @throws org.jomc.ObjectManagementException if getting the property instance fails.
 */
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
private java.lang.Boolean isMinimumColumnEditableByDefault()
{
    final java.lang.Boolean _p = (java.lang.Boolean) org.jomc.ObjectManagerFactory.getObjectManager( this.getClass().getClassLoader() ).getProperty( this, "minimumColumnEditableByDefault" );
    assert _p != null : "'minimumColumnEditableByDefault' property not found.";
    return _p;
}

/**
 * Gets the value of the {@code <nameColumnEditableByDefault>} property.
 * <p><dl>
 * <dt><b>Final:</b></dt><dd>No</dd>
 * </dl></p>
 * @return Flag indicating that the "name" column is editable by default.
 * @throws org.jomc.ObjectManagementException if getting the property instance fails.
 */
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
private java.lang.Boolean isNameColumnEditableByDefault()
{
    final java.lang.Boolean _p = (java.lang.Boolean) org.jomc.ObjectManagerFactory.getObjectManager( this.getClass().getClassLoader() ).getProperty( this, "nameColumnEditableByDefault" );
    assert _p != null : "'nameColumnEditableByDefault' property not found.";
    return _p;
}

/**
 * Gets the value of the {@code <valueColumnEditableByDefault>} property.
 * <p><dl>
 * <dt><b>Final:</b></dt><dd>No</dd>
 * </dl></p>
 * @return Flag indicating that the "value" column is editable by default.
 * @throws org.jomc.ObjectManagementException if getting the property instance fails.
 */
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
private java.lang.Boolean isValueColumnEditableByDefault()
{
    final java.lang.Boolean _p = (java.lang.Boolean) org.jomc.ObjectManagerFactory.getObjectManager( this.getClass().getClassLoader() ).getProperty( this, "valueColumnEditableByDefault" );
    assert _p != null : "'valueColumnEditableByDefault' property not found.";
    return _p;
}
// </editor-fold>
// SECTION-END
// SECTION-START[Messages]
// <editor-fold defaultstate="collapsed" desc=" Generated Messages ">
// NOTE(review): generated message accessors (JOMC SourceFileProcessor) --
// localized texts resolved through the JOMC ObjectManager; regenerate from
// the module descriptor instead of hand-editing.

/**
 * Gets the text of the {@code <illegalColumnIndexMessage>} message.
 * <p><dl>
 * <dt><b>Languages:</b></dt>
 * <dd>English (default)</dd>
 * <dd>Deutsch</dd>
 * <dt><b>Final:</b></dt><dd>No</dd>
 * </dl></p>
 * @param locale The locale of the message to return.
 * @param columnIndex Format argument.
 * @return The text of the {@code <illegalColumnIndexMessage>} message for {@code locale}.
 * @throws org.jomc.ObjectManagementException if getting the message instance fails.
 */
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
private String getIllegalColumnIndexMessage( final java.util.Locale locale, final java.lang.Number columnIndex )
{
    final String _m = org.jomc.ObjectManagerFactory.getObjectManager( this.getClass().getClassLoader() ).getMessage( this, "illegalColumnIndexMessage", locale, columnIndex );
    assert _m != null : "'illegalColumnIndexMessage' message not found.";
    return _m;
}

/**
 * Gets the text of the {@code <incrementColumnTitle>} message.
 * <p><dl>
 * <dt><b>Languages:</b></dt>
 * <dd>English (default)</dd>
 * <dd>Deutsch</dd>
 * <dt><b>Final:</b></dt><dd>No</dd>
 * </dl></p>
 * @param locale The locale of the message to return.
 * @return The text of the {@code <incrementColumnTitle>} message for {@code locale}.
 * @throws org.jomc.ObjectManagementException if getting the message instance fails.
 */
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
private String getIncrementColumnTitle( final java.util.Locale locale )
{
    final String _m = org.jomc.ObjectManagerFactory.getObjectManager( this.getClass().getClassLoader() ).getMessage( this, "incrementColumnTitle", locale );
    assert _m != null : "'incrementColumnTitle' message not found.";
    return _m;
}

/**
 * Gets the text of the {@code <maximumColumnTitle>} message.
 * <p><dl>
 * <dt><b>Languages:</b></dt>
 * <dd>English (default)</dd>
 * <dd>Deutsch</dd>
 * <dt><b>Final:</b></dt><dd>No</dd>
 * </dl></p>
 * @param locale The locale of the message to return.
 * @return The text of the {@code <maximumColumnTitle>} message for {@code locale}.
 * @throws org.jomc.ObjectManagementException if getting the message instance fails.
 */
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
private String getMaximumColumnTitle( final java.util.Locale locale )
{
    final String _m = org.jomc.ObjectManagerFactory.getObjectManager( this.getClass().getClassLoader() ).getMessage( this, "maximumColumnTitle", locale );
    assert _m != null : "'maximumColumnTitle' message not found.";
    return _m;
}

/**
 * Gets the text of the {@code <minimumColumnTitle>} message.
 * <p><dl>
 * <dt><b>Languages:</b></dt>
 * <dd>English (default)</dd>
 * <dd>Deutsch</dd>
 * <dt><b>Final:</b></dt><dd>No</dd>
 * </dl></p>
 * @param locale The locale of the message to return.
 * @return The text of the {@code <minimumColumnTitle>} message for {@code locale}.
 * @throws org.jomc.ObjectManagementException if getting the message instance fails.
 */
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
private String getMinimumColumnTitle( final java.util.Locale locale )
{
    final String _m = org.jomc.ObjectManagerFactory.getObjectManager( this.getClass().getClassLoader() ).getMessage( this, "minimumColumnTitle", locale );
    assert _m != null : "'minimumColumnTitle' message not found.";
    return _m;
}
/**
* Gets the text of the {@code <nameColumnTitle>} message.
* <p><dl>
* <dt><b>Languages:</b></dt>
* <dd>English (default)</dd>
* <dd>Deutsch</dd>
* <dt><b>Final:</b></dt><dd>No</dd>
* </dl></p>
* @param locale The locale of the message to return.
* @return The text of the {@code <nameColumnTitle>} message for {@code locale}.
* @throws org.jomc.ObjectManagementException if getting the message instance fails.
*/
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
private String getNameColumnTitle( final java.util.Locale locale )
{
final String _m = org.jomc.ObjectManagerFactory.getObjectManager( this.getClass().getClassLoader() ).getMessage( this, "nameColumnTitle", locale );
assert _m != null : "'nameColumnTitle' message not found.";
return _m;
}
/**
* Gets the text of the {@code <valueColumnTitle>} message.
* <p><dl>
* <dt><b>Languages:</b></dt>
* <dd>English (default)</dd>
* <dd>Deutsch</dd>
* <dt><b>Final:</b></dt><dd>No</dd>
* </dl></p>
* @param locale The locale of the message to return.
* @return The text of the {@code <valueColumnTitle>} message for {@code locale}.
* @throws org.jomc.ObjectManagementException if getting the message instance fails.
*/
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
private String getValueColumnTitle( final java.util.Locale locale )
{
final String _m = org.jomc.ObjectManagerFactory.getObjectManager( this.getClass().getClassLoader() ).getMessage( this, "valueColumnTitle", locale );
assert _m != null : "'valueColumnTitle' message not found.";
return _m;
}
// </editor-fold>
// SECTION-END
}
| |
package org.bitcoin;
import org.junit.Test;
import javax.xml.bind.DatatypeConverter;
import static org.bitcoin.NativeSecp256k1Util.*;
/**
* This class holds test cases defined for testing this library.
*/
public class NativeSecp256k1Test {
//TODO improve comments/add more tests
// NOTE(review): these tests exercise the native secp256k1 JNI bindings;
// they presumably require the native library to be loaded -- confirm the
// test harness sets that up before running this class.
/**
* This tests verify() for a valid signature
*/
@Test
public void testVerifyPos() throws AssertFailException{
byte[] data = DatatypeConverter.parseHexBinary("CF80CD8AED482D5D1527D7DC72FCEFF84E6326592848447D2DC0B0E87DFC9A90"); //sha256hash of "testing"
byte[] sig = DatatypeConverter.parseHexBinary("3044022079BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F817980220294F14E883B3F525B5367756C2A11EF6CF84B730B36C17CB0C56F0AAB2C98589");
byte[] pub = DatatypeConverter.parseHexBinary("040A629506E1B65CD9D2E0BA9C75DF9C4FED0DB16DC9625ED14397F0AFC836FAE595DC53F8B0EFE61E703075BD9B143BAC75EC0E19F82A2208CAEB32BE53414C40");
boolean result = NativeSecp256k1.verify( data, sig, pub);
assertEquals( result, true , "testVerifyPos");
}
/**
* This tests verify() for an invalid signature
*/
@Test
public void testVerifyNeg() throws AssertFailException{
// Same signature as the positive case but the hashed message differs in
// its last byte, so verification must fail.
byte[] data = DatatypeConverter.parseHexBinary("CF80CD8AED482D5D1527D7DC72FCEFF84E6326592848447D2DC0B0E87DFC9A91"); //sha256hash of "testing"
byte[] sig = DatatypeConverter.parseHexBinary("3044022079BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F817980220294F14E883B3F525B5367756C2A11EF6CF84B730B36C17CB0C56F0AAB2C98589");
byte[] pub = DatatypeConverter.parseHexBinary("040A629506E1B65CD9D2E0BA9C75DF9C4FED0DB16DC9625ED14397F0AFC836FAE595DC53F8B0EFE61E703075BD9B143BAC75EC0E19F82A2208CAEB32BE53414C40");
boolean result = NativeSecp256k1.verify( data, sig, pub);
assertEquals( result, false , "testVerifyNeg");
}
/**
* This tests secret key verify() for a valid secretkey
*/
@Test
public void testSecKeyVerifyPos() throws AssertFailException{
byte[] sec = DatatypeConverter.parseHexBinary("67E56582298859DDAE725F972992A07C6C4FB9F62A8FFF58CE3CA926A1063530");
boolean result = NativeSecp256k1.secKeyVerify( sec );
assertEquals( result, true , "testSecKeyVerifyPos");
}
/**
* This tests secret key verify() for an invalid secretkey
*/
@Test
public void testSecKeyVerifyNeg() throws AssertFailException{
// All-ones is >= the secp256k1 group order, hence not a valid secret key.
byte[] sec = DatatypeConverter.parseHexBinary("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF");
boolean result = NativeSecp256k1.secKeyVerify( sec );
assertEquals( result, false , "testSecKeyVerifyNeg");
}
/**
* This tests public key create() for a valid secretkey
*/
@Test
public void testPubKeyCreatePos() throws AssertFailException{
byte[] sec = DatatypeConverter.parseHexBinary("67E56582298859DDAE725F972992A07C6C4FB9F62A8FFF58CE3CA926A1063530");
byte[] resultArr = NativeSecp256k1.computePubkey(sec, false);
String pubkeyString = DatatypeConverter.printHexBinary(resultArr);
assertEquals( pubkeyString , "04C591A8FF19AC9C4E4E5793673B83123437E975285E7B442F4EE2654DFFCA5E2D2103ED494718C697AC9AEBCFD19612E224DB46661011863ED2FC54E71861E2A6" , "testPubKeyCreatePos");
}
/**
* This tests public key create() for an invalid secretkey
*/
@Test
public void testPubKeyCreateNeg() throws AssertFailException{
// An invalid secret key yields an empty result rather than a thrown error.
byte[] sec = DatatypeConverter.parseHexBinary("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF");
byte[] resultArr = NativeSecp256k1.computePubkey(sec, false);
String pubkeyString = DatatypeConverter.printHexBinary(resultArr);
assertEquals( pubkeyString, "" , "testPubKeyCreateNeg");
}
/**
* This tests sign() for a valid secretkey
*/
@Test
public void testSignPos() throws AssertFailException{
byte[] data = DatatypeConverter.parseHexBinary("CF80CD8AED482D5D1527D7DC72FCEFF84E6326592848447D2DC0B0E87DFC9A90"); //sha256hash of "testing"
byte[] sec = DatatypeConverter.parseHexBinary("67E56582298859DDAE725F972992A07C6C4FB9F62A8FFF58CE3CA926A1063530");
byte[] resultArr = NativeSecp256k1.sign(data, sec);
String sigString = DatatypeConverter.printHexBinary(resultArr);
assertEquals( sigString, "30440220182A108E1448DC8F1FB467D06A0F3BB8EA0533584CB954EF8DA112F1D60E39A202201C66F36DA211C087F3AF88B50EDF4F9BDAA6CF5FD6817E74DCA34DB12390C6E9" , "testSignPos");
}
/**
* This tests sign() for an invalid secretkey
*/
@Test
public void testSignNeg() throws AssertFailException{
byte[] data = DatatypeConverter.parseHexBinary("CF80CD8AED482D5D1527D7DC72FCEFF84E6326592848447D2DC0B0E87DFC9A90"); //sha256hash of "testing"
byte[] sec = DatatypeConverter.parseHexBinary("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF");
byte[] resultArr = NativeSecp256k1.sign(data, sec);
String sigString = DatatypeConverter.printHexBinary(resultArr);
assertEquals( sigString, "" , "testSignNeg");
}
/**
* This tests private key tweak-add
*/
@Test
public void testPrivKeyTweakAdd() throws AssertFailException {
byte[] sec = DatatypeConverter.parseHexBinary("67E56582298859DDAE725F972992A07C6C4FB9F62A8FFF58CE3CA926A1063530");
byte[] data = DatatypeConverter.parseHexBinary("3982F19BEF1615BCCFBB05E321C10E1D4CBA3DF0E841C2E41EEB6016347653C3"); //sha256hash of "tweak"
byte[] resultArr = NativeSecp256k1.privKeyTweakAdd( sec , data );
String seckeyString = DatatypeConverter.printHexBinary(resultArr);
assertEquals( seckeyString , "A168571E189E6F9A7E2D657A4B53AE99B909F7E712D1C23CED28093CD57C88F3" , "testPrivKeyTweakAdd");
}
/**
* This tests private key tweak-mul
*/
@Test
public void testPrivKeyTweakMul() throws AssertFailException {
byte[] sec = DatatypeConverter.parseHexBinary("67E56582298859DDAE725F972992A07C6C4FB9F62A8FFF58CE3CA926A1063530");
byte[] data = DatatypeConverter.parseHexBinary("3982F19BEF1615BCCFBB05E321C10E1D4CBA3DF0E841C2E41EEB6016347653C3"); //sha256hash of "tweak"
byte[] resultArr = NativeSecp256k1.privKeyTweakMul( sec , data );
String seckeyString = DatatypeConverter.printHexBinary(resultArr);
assertEquals( seckeyString , "97F8184235F101550F3C71C927507651BD3F1CDB4A5A33B8986ACF0DEE20FFFC" , "testPrivKeyTweakMul");
}
/**
* This tests public key tweak-add, in both uncompressed and compressed form
*/
@Test
public void testPubKeyTweakAdd() throws AssertFailException {
byte[] pub = DatatypeConverter.parseHexBinary("040A629506E1B65CD9D2E0BA9C75DF9C4FED0DB16DC9625ED14397F0AFC836FAE595DC53F8B0EFE61E703075BD9B143BAC75EC0E19F82A2208CAEB32BE53414C40");
byte[] data = DatatypeConverter.parseHexBinary("3982F19BEF1615BCCFBB05E321C10E1D4CBA3DF0E841C2E41EEB6016347653C3"); //sha256hash of "tweak"
byte[] resultArr = NativeSecp256k1.pubKeyTweakAdd( pub , data, false);
String pubkeyString = DatatypeConverter.printHexBinary(resultArr);
byte[] resultArrCompressed = NativeSecp256k1.pubKeyTweakAdd( pub , data, true);
String pubkeyStringCompressed = DatatypeConverter.printHexBinary(resultArrCompressed);
assertEquals(pubkeyString , "0411C6790F4B663CCE607BAAE08C43557EDC1A4D11D88DFCB3D841D0C6A941AF525A268E2A863C148555C48FB5FBA368E88718A46E205FABC3DBA2CCFFAB0796EF" , "testPubKeyTweakAdd");
assertEquals(pubkeyStringCompressed , "0311C6790F4B663CCE607BAAE08C43557EDC1A4D11D88DFCB3D841D0C6A941AF52" , "testPubKeyTweakAdd (compressed)");
}
/**
* This tests public key tweak-mul, in both uncompressed and compressed form
*/
@Test
public void testPubKeyTweakMul() throws AssertFailException {
byte[] pub = DatatypeConverter.parseHexBinary("040A629506E1B65CD9D2E0BA9C75DF9C4FED0DB16DC9625ED14397F0AFC836FAE595DC53F8B0EFE61E703075BD9B143BAC75EC0E19F82A2208CAEB32BE53414C40");
byte[] data = DatatypeConverter.parseHexBinary("3982F19BEF1615BCCFBB05E321C10E1D4CBA3DF0E841C2E41EEB6016347653C3"); //sha256hash of "tweak"
byte[] resultArr = NativeSecp256k1.pubKeyTweakMul( pub , data, false);
String pubkeyString = DatatypeConverter.printHexBinary(resultArr);
byte[] resultArrCompressed = NativeSecp256k1.pubKeyTweakMul( pub , data, true);
String pubkeyStringCompressed = DatatypeConverter.printHexBinary(resultArrCompressed);
assertEquals(pubkeyString , "04E0FE6FE55EBCA626B98A807F6CAF654139E14E5E3698F01A9A658E21DC1D2791EC060D4F412A794D5370F672BC94B722640B5F76914151CFCA6E712CA48CC589" , "testPubKeyTweakMul");
assertEquals(pubkeyStringCompressed , "03E0FE6FE55EBCA626B98A807F6CAF654139E14E5E3698F01A9A658E21DC1D2791" , "testPubKeyTweakMul (compressed)");
}
/**
* This tests seed randomization
*/
@Test
public void testRandomize() throws AssertFailException {
byte[] seed = DatatypeConverter.parseHexBinary("A441B15FE9A3CF56661190A0B93B9DEC7D04127288CC87250967CF3B52894D11"); //sha256hash of "random"
boolean result = NativeSecp256k1.randomize(seed);
assertEquals( result, true, "testRandomize");
}
/**
* Tests that we can decompress valid public keys
* @throws AssertFailException
*/
@Test
public void testDecompressPubKey() throws AssertFailException {
byte[] compressedPubKey = DatatypeConverter.parseHexBinary("0315EAB529E7D5EB637214EA8EC8ECE5DCD45610E8F4B7CC76A35A6FC27F5DD981");
byte[] result1 = NativeSecp256k1.decompress(compressedPubKey);
byte[] result2 = NativeSecp256k1.decompress(result1); // this is a no-op
String resultString1 = DatatypeConverter.printHexBinary(result1);
String resultString2 = DatatypeConverter.printHexBinary(result2);
assertEquals(resultString1, "0415EAB529E7D5EB637214EA8EC8ECE5DCD45610E8F4B7CC76A35A6FC27F5DD9817551BE3DF159C83045D9DFAC030A1A31DC9104082DB7719C098E87C1C4A36C19", "testDecompressPubKey (compressed)");
assertEquals(resultString2, "0415EAB529E7D5EB637214EA8EC8ECE5DCD45610E8F4B7CC76A35A6FC27F5DD9817551BE3DF159C83045D9DFAC030A1A31DC9104082DB7719C098E87C1C4A36C19", "testDecompressPubKey (no-op)");
}
/**
* Tests that we can check validity of public keys
* @throws AssertFailException
*/
@Test
public void testIsValidPubKeyPos() throws AssertFailException {
byte[] pubkey = DatatypeConverter.parseHexBinary("0456b3817434935db42afda0165de529b938cf67c7510168a51b9297b1ca7e4d91ea59c64516373dd2fe6acc79bb762718bc2659fa68d343bdb12d5ef7b9ed002b");
byte[] compressedPubKey = DatatypeConverter.parseHexBinary("03de961a47a519c5c0fc8e744d1f657f9ea6b9a921d2a3bceb8743e1885f752676");
boolean result1 = NativeSecp256k1.isValidPubKey(pubkey);
boolean result2 = NativeSecp256k1.isValidPubKey(compressedPubKey);
assertEquals(result1, true, "testIsValidPubKeyPos");
assertEquals(result2, true, "testIsValidPubKeyPos (compressed)");
}
/**
* Tests that an ill-formed public key is rejected
*/
@Test
public void testIsValidPubKeyNeg() throws AssertFailException {
//do we have test vectors some where to test this more thoroughly?
byte[] pubkey = DatatypeConverter.parseHexBinary("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF");
boolean result1 = NativeSecp256k1.isValidPubKey(pubkey);
assertEquals(result1, false, "testIsValidPubKeyNeg");
}
/**
* Tests ECDH secret derivation from a secret key and a public key
*/
@Test
public void testCreateECDHSecret() throws AssertFailException{
byte[] sec = DatatypeConverter.parseHexBinary("67E56582298859DDAE725F972992A07C6C4FB9F62A8FFF58CE3CA926A1063530");
byte[] pub = DatatypeConverter.parseHexBinary("040A629506E1B65CD9D2E0BA9C75DF9C4FED0DB16DC9625ED14397F0AFC836FAE595DC53F8B0EFE61E703075BD9B143BAC75EC0E19F82A2208CAEB32BE53414C40");
byte[] resultArr = NativeSecp256k1.createECDHSecret(sec, pub);
String ecdhString = DatatypeConverter.printHexBinary(resultArr);
assertEquals( ecdhString, "2A2A67007A926E6594AF3EB564FC74005B37A9C8AEF2033C4552051B5C87F043" , "testCreateECDHSecret");
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id: PSState.java 1051421 2010-12-21 08:54:25Z jeremias $ */
package org.apache.xmlgraphics.ps;
import java.awt.Color;
import java.awt.geom.AffineTransform;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import org.apache.xmlgraphics.java2d.color.ColorUtil;
/**
* This class holds the current state of the PostScript interpreter.
*
* @version $Id: PSState.java 1051421 2010-12-21 08:54:25Z jeremias $
*/
public class PSState implements Serializable {

    /** Serialization version of this class. */
    private static final long serialVersionUID = -3862731539801753248L;

    /** Default for setdash */
    public static final String DEFAULT_DASH = "[] 0";
    /** Default color in PostScript */
    public static final Color DEFAULT_RGB_COLOR = Color.black;

    /** Current transformation matrix (product of all concatenated transforms). */
    private AffineTransform transform = new AffineTransform();
    /** Individual transforms concatenated onto this state, in order. */
    private final List<AffineTransform> transformConcatList = new ArrayList<>();

    private int linecap = 0;
    private int linejoin = 0;
    private float miterLimit = 0;
    private double linewidth = 1.0f;
    private String dashpattern = DEFAULT_DASH;
    private Color color = DEFAULT_RGB_COLOR;

    // Font state
    private String fontname;
    private float fontsize;

    /**
     * Default constructor
     */
    public PSState() {
        // nop
    }

    /**
     * Copy constructor
     *
     * @param org
     *            the original to copy from
     * @param copyTransforms
     *            true if the list of matrix concats should be cloned, too
     */
    public PSState(final PSState org, final boolean copyTransforms) {
        this.transform = (AffineTransform) org.transform.clone();
        if (copyTransforms) {
            this.transformConcatList.addAll(org.transformConcatList);
        }
        this.linecap = org.linecap;
        this.linejoin = org.linejoin;
        this.miterLimit = org.miterLimit;
        this.linewidth = org.linewidth;
        this.dashpattern = org.dashpattern;
        this.color = org.color;
        this.fontname = org.fontname;
        this.fontsize = org.fontsize;
    }

    /**
     * Returns the transform.
     *
     * @return the current transformation matrix
     */
    public AffineTransform getTransform() {
        return this.transform;
    }

    /**
     * Check the current transform. The transform for the current state is the
     * combination of all transforms in the current state. The parameter is
     * compared against this current transform.
     *
     * @param tf
     *            the transform the check against
     * @return true if the new transform is different then the current transform
     */
    public boolean checkTransform(final AffineTransform tf) {
        return !tf.equals(this.transform);
    }

    /**
     * Concats the given transformation matrix with the current one.
     *
     * @param transform
     *            The new transformation matrix
     */
    public void concatMatrix(final AffineTransform transform) {
        this.transformConcatList.add(transform);
        this.transform.concatenate(transform);
    }

    /**
     * Establishes the specified line cap.
     *
     * @param value
     *            line cap (0, 1 or 2) as defined by the setlinecap command
     * @return true if the line cap changed compared to the previous setting
     */
    public boolean useLineCap(final int value) {
        if (this.linecap != value) {
            this.linecap = value;
            return true;
        } else {
            return false;
        }
    }

    /**
     * Establishes the specified line join.
     *
     * @param value
     *            line join (0, 1 or 2) as defined by the setlinejoin command
     * @return true if the line join changed compared to the previous setting
     */
    public boolean useLineJoin(final int value) {
        if (this.linejoin != value) {
            this.linejoin = value;
            return true;
        } else {
            return false;
        }
    }

    /**
     * Establishes the specified miter limit.
     *
     * @param value
     *            the miter limit as defined by the setmiterlimit command
     * @return true if the miter limit changed compared to the previous setting
     */
    public boolean useMiterLimit(final float value) {
        if (this.miterLimit != value) {
            this.miterLimit = value;
            return true;
        } else {
            return false;
        }
    }

    /**
     * Establishes the specified line width.
     *
     * @param value
     *            line width as defined by the setlinewidth command
     * @return true if the line width changed compared to the previous setting
     */
    public boolean useLineWidth(final double value) {
        if (this.linewidth != value) {
            this.linewidth = value;
            return true;
        } else {
            return false;
        }
    }

    /**
     * Establishes the specified dash.
     *
     * @param pattern
     *            dash pattern as defined by the setdash command
     * @return true if the dash pattern changed compared to the previous setting
     */
    public boolean useDash(final String pattern) {
        if (!this.dashpattern.equals(pattern)) {
            this.dashpattern = pattern;
            return true;
        } else {
            return false;
        }
    }

    /**
     * Establishes the specified color (RGB).
     *
     * @param value
     *            color as defined by the setrgbcolor command
     * @return true if the color changed compared to the previous setting
     */
    public boolean useColor(final Color value) {
        if (!ColorUtil.isSameColor(this.color, value)) {
            this.color = value;
            return true;
        } else {
            return false;
        }
    }

    /**
     * Establishes the specified font and size.
     *
     * @param name
     *            name of the font for the "F" command (see FOP Std Proc Set)
     * @param size
     *            size of the font
     * @return true if the font changed compared to the previous setting
     */
    public boolean useFont(final String name, final float size) {
        if (name == null) {
            throw new NullPointerException("font name must not be null");
        }
        if (this.fontname == null || !this.fontname.equals(name)
                || this.fontsize != size) {
            this.fontname = name;
            this.fontsize = size;
            return true;
        } else {
            return false;
        }
    }

    /**
     * Reestablishes the graphics state represented by this instance by issueing
     * the necessary commands.
     *
     * @param gen
     *            The generator to use for output
     * @exception IOException
     *                In case of an I/O problem
     */
    public void reestablish(final PSGenerator gen) throws IOException {
        for (final AffineTransform tf : this.transformConcatList) {
            gen.concatMatrix(tf);
        }
        gen.useLineCap(this.linecap);
        // Fix: linejoin and miterLimit are tracked by this state (see the
        // useLineJoin/useMiterLimit setters above) but were never re-issued
        // here, so a restored state could silently keep the wrong join/miter
        // settings. NOTE(review): assumes PSGenerator exposes useLineJoin and
        // useMiterLimit mirroring useLineCap -- confirm against PSGenerator.
        gen.useLineJoin(this.linejoin);
        gen.useMiterLimit(this.miterLimit);
        gen.useLineWidth(this.linewidth);
        gen.useDash(this.dashpattern);
        gen.useColor(this.color);
        if (this.fontname != null) {
            gen.useFont(this.fontname, this.fontsize);
        }
    }
}
| |
package com.planet_ink.coffee_mud.MOBS;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.DatabaseEngine;
import com.planet_ink.coffee_mud.Libraries.interfaces.XMLLibrary;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2010 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
@SuppressWarnings("unchecked")
public class StdAuctioneer extends StdMOB implements Auctioneer
{
/** @return the class identifier used by CMClass to look up this MOB type. */
public String ID()
{
final String classId="StdAuctioneer";
return classId;
}
/**
* Constructs a default auctioneer MOB: good-aligned, penniless, charismatic,
* and durable enough (1000 hp) not to be casually killed.
*/
public StdAuctioneer()
{
super();
Username="an auctioneer";
setDescription("He talks faster than you!");
setDisplayText("The local auctioneer is here calling prices.");
CMLib.factions().setAlignment(this,Faction.ALIGN_GOOD);
// Carries no money; auction funds are handled through the auction system.
setMoney(0);
baseEnvStats.setWeight(150);
// Wimp threshold 0: never flees from combat.
setWimpHitPoint(0);
baseCharStats().setStat(CharStats.STAT_INTELLIGENCE,16);
baseCharStats().setStat(CharStats.STAT_CHARISMA,25);
baseEnvStats().setArmor(0);
baseState.setHitPoints(1000);
// Recover order matters: max state first, then current state/stats are
// recalculated from the base values set above.
recoverMaxState();
resetToMaxState();
recoverEnvStats();
recoverCharStats();
}
// Auction data captured by okMessage() for the most recent GIVE/SELL, so the
// follow-up processing can complete the listing.
public AuctionData lastMsgData=null;
// Last per-auction-house check times, keyed by upper-cased house name; shared
// across all auctioneer instances (hence static).
protected static final Hashtable lastCheckTimes=new Hashtable();
/** Builds a fresh shop view backed by this auctioneer's auction inventory. */
public CoffeeShop getShop(){
CoffeeShop shop=((CoffeeShop)CMClass.getCommon("AuctionCoffeeShop")).build(this);
shop.addStoreInventory(null);
return shop;
}
// The auction house name is persisted in this MOB's misc-text field.
public String auctionHouse(){return text();}
public void setAuctionHouse(String name){setMiscText(name);}
// Auction rate settings; -1.0 / -1 means "unset, use the system default".
protected double timedListingPrice=-1.0;
public double timedListingPrice(){return timedListingPrice;}
public void setTimedListingPrice(double d){timedListingPrice=d;}
protected double timedListingPct=-1.0;
public double timedListingPct(){return timedListingPct;}
public void setTimedListingPct(double d){timedListingPct=d;}
protected double timedFinalCutPct=-1.0;
public double timedFinalCutPct(){return timedFinalCutPct;}
public void setTimedFinalCutPct(double d){timedFinalCutPct=d;}
protected int maxTimedAuctionDays=-1;
public int maxTimedAuctionDays(){return maxTimedAuctionDays;}
public void setMaxTimedAuctionDays(int d){maxTimedAuctionDays=d;}
protected int minTimedAuctionDays=-1;
public int minTimedAuctionDays(){return minTimedAuctionDays;}
public void setMinTimedAuctionDays(int d){minTimedAuctionDays=d;}
/** Unregisters this auctioneer from the world map when it is destroyed. */
public void destroy()
{
super.destroy();
CMLib.map().delAuctionHouse(this);
}
/** Registers this auctioneer as an auction house when it enters the world. */
public void bringToLife(Room newLocation, boolean resetStats)
{
super.bringToLife(newLocation,resetStats);
CMLib.map().addAuctionHouse(this);
}
// This shopkeeper deals exclusively in auctions; the sold-mask is fixed and
// the mask mutators are intentionally no-ops.
public long getWhatIsSoldMask(){ return DEAL_AUCTIONEER;}
public boolean isSold(int mask){return mask==ShopKeeper.DEAL_AUCTIONEER;}
public void setWhatIsSoldMask(long newSellCode){ }
public void addSoldType(int mask){}
/**
* Periodic housekeeping: roughly once per mud-hour, scans this auction
* house's listings and settles any auctions whose timers have expired --
* notifying winner and seller, or returning unsold goods.
*/
public boolean tick(Tickable ticking, int tickID)
{
if(!super.tick(ticking,tickID))
return false;
// Skip processing until the mud has fully started up.
if(!CMProps.getBoolVar(CMProps.SYSTEMB_MUDSTARTED)) return true;
if(CMLib.flags().isInTheGame(this,true))
// Lock on the interned house-name string so multiple auctioneers of the
// same house never settle the same auctions concurrently.
synchronized(("AUCTION_HOUSE_"+auctionHouse().toUpperCase().trim()).intern())
{
Long lastTime=(Long)StdAuctioneer.lastCheckTimes.get(auctionHouse().toUpperCase().trim());
// Throttle: only re-scan if (almost) a full mud-hour has elapsed since
// the last check for this house.
if((lastTime==null)||(System.currentTimeMillis()-lastTime.longValue())>(Tickable.TIME_MILIS_PER_MUDHOUR-5))
{
StdAuctioneer.lastCheckTimes.remove(auctionHouse().toUpperCase().trim());
long thisTime=System.currentTimeMillis();
StdAuctioneer.lastCheckTimes.put(auctionHouse().toUpperCase().trim(),Long.valueOf(thisTime));
Vector auctions=CMLib.coffeeShops().getAuctions(null, auctionHouse());
for(int a=0;a<auctions.size();a++)
{
Auctioneer.AuctionData data=(Auctioneer.AuctionData)auctions.elementAt(a);
if(thisTime>=data.tickDown)
{
// Only handle auctions that expired since the previous scan, so each
// closing is announced exactly once.
if((lastTime==null)||(data.tickDown>lastTime.longValue()))
{
if(data.highBidderM!=null)
{
// Sold: payment is deferred until the winner claims the item via BUY.
//CMLib.coffeeShops().returnMoney(data.auctioningM,data.currency,finalAmount);
CMLib.coffeeShops().auctionNotify(data.auctioningM,"Your auction for "+data.auctioningI.name()+" sold to "+data.highBidderM.Name()+" for "+CMLib.beanCounter().nameCurrencyShort(data.currency,data.bid)+". When the high bidder comes to claim "+data.highBidderM.charStats().hisher()+" property, you will automatically receive your payment along with another notice.",data.auctioningI.Name());
CMLib.coffeeShops().auctionNotify(data.highBidderM,"You won the auction for "+data.auctioningI.name()+" for "+CMLib.beanCounter().nameCurrencyShort(data.currency,data.bid)+". Your winnings, along with the difference from your high bid ("+CMLib.beanCounter().nameCurrencyShort(data.currency,data.highBid-data.bid)+") will be given to you as soon as you claim your property. To claim your winnings, come to "+name()+" at "+location().displayText()+" and enter the BUY command for the item again (you will not be charged).",data.auctioningI.Name());
}
else
{
// Unsold: hand the item straight back to the seller and cancel the
// listing; offline sellers get their inventory persisted immediately.
CMLib.coffeeShops().auctionNotify(data.auctioningM,"Your auction for "+data.auctioningI.name()+" went unsold. '"+data.auctioningI.name()+"' has been automatically returned to your inventory.",data.auctioningI.Name());
data.auctioningM.giveItem(data.auctioningI);
if(!CMLib.flags().isInTheGame(data.auctioningM,true))
CMLib.database().DBUpdatePlayerItems(data.auctioningM);
CMLib.coffeeShops().cancelAuction(data);
}
}
}
}
}
}
return true;
}
/**
* Force-transfers an item between two MOBs by emitting a paired drop/get
* message sequence tagged "GIVE" (bypassing the normal GIVE command checks).
*/
public void autoGive(MOB src, MOB tgt, Item I)
{
CMMsg msg2=CMClass.getMsg(src,I,null,CMMsg.MSG_DROP,null,CMMsg.MSG_DROP,"GIVE",CMMsg.MSG_DROP,null);
location().send(this,msg2);
msg2=CMClass.getMsg(tgt,I,null,CMMsg.MSG_GET,null,CMMsg.MSG_GET,"GIVE",CMMsg.MSG_GET,null);
location().send(this,msg2);
}
/**
* Extracts the text between the first pair of single quotes in a bid
* message (e.g. {@code BID 'sword' 10 gold} yields {@code sword}).
*
* @param targetMessage the raw message text; may be null
* @return the quoted substring, or null when no complete quoted section exists
*/
protected String parseBidString(String targetMessage)
{
if(targetMessage==null)
return null;
final int openQuote=targetMessage.indexOf('\'');
if(openQuote<0)
return null;
final int closeQuote=targetMessage.indexOf('\'',openQuote+1);
if(closeQuote<=openQuote)
return null;
return targetMessage.substring(openQuote+1,closeQuote);
}
public boolean okMessage(Environmental myHost, CMMsg msg)
{
MOB mob=msg.source();
if((msg.targetMinor()==CMMsg.TYP_EXPIRE)
&&(msg.target()==location())
&&(CMLib.flags().isInTheGame(this,true)))
return false;
else
if(msg.amITarget(this))
{
switch(msg.targetMinor())
{
case CMMsg.TYP_GIVE:
case CMMsg.TYP_SELL:
if(CMLib.flags().aliveAwakeMobileUnbound(mob,true))
{
if(!(msg.tool() instanceof Item))
{
CMLib.commands().postSay(this,mob,"I can't seem to auction "+msg.tool().name()+".",true,false);
return false;
}
if(msg.source().isMonster())
{
CMLib.commands().postSay(this,mob,"You can't sell anything.",true,false);
return false;
}
Item I=(Item)msg.tool();
if((I instanceof Container)&&(((Container)I).getContents().size()>0))
{
CMLib.commands().postSay(this,mob,I.name()+" will have to be emptied first.",true,false);
return false;
}
if(!(I.amWearingAt(Item.IN_INVENTORY)))
{
CMLib.commands().postSay(this,mob,I.name()+" will have to be removed first.",true,false);
return false;
}
AuctionRates aRates=new AuctionRates(this);
CMLib.commands().postSay(this,mob,"Ok, so how many local days will your auction run for ("+aRates.minDays+"-"+aRates.maxDays+")?",true,false);
int days=0;
try{days=CMath.s_int(mob.session().prompt(":","",10000));}catch(Exception e){return false;}
if(days==0) return false;
if(days<aRates.minDays)
{
CMLib.commands().postSay(this,mob,"Minimum number of local days on an auction is "+aRates.minDays+".",true,false);
return false;
}
if(days>aRates.maxDays)
{
CMLib.commands().postSay(this,mob,"Maximum number of local days on an auction is "+aRates.maxDays+".",true,false);
return false;
}
double deposit=aRates.timeListPrice;
deposit+=(aRates.timeListPct*((double)CMath.mul(days,I.baseGoldValue())));
String depositAmt=CMLib.beanCounter().nameCurrencyLong(mob, deposit);
if(CMLib.beanCounter().getTotalAbsoluteValue(mob,CMLib.beanCounter().getCurrency(mob))<deposit)
{
CMLib.commands().postSay(this,mob,"You don't have enough to cover the listing fee of "+depositAmt+". Sell a cheaper item, use fewer days, or come back later.",true,false);
return false;
}
CMLib.commands().postSay(this,mob,"Auctioning "+I.name()+" will cost a listing fee of "+depositAmt+", proceed?",true,false);
try{if(!mob.session().confirm("(Y/N):","Y",10000)) return false;}catch(Exception e){return false;}
lastMsgData=new AuctionData();
lastMsgData.auctioningI=(Item)msg.tool();
lastMsgData.auctioningM=msg.source();
lastMsgData.currency=CMLib.beanCounter().getCurrency(msg.source());
Area area=CMLib.map().getStartArea(this);
if(area==null) area=CMLib.map().getStartArea(msg.source());
lastMsgData.tickDown=System.currentTimeMillis()+(days*area.getTimeObj().getHoursInDay()*Tickable.TIME_MILIS_PER_MUDHOUR)+60000;
return super.okMessage(myHost, msg);
}
return false;
case CMMsg.TYP_BID:
if(CMLib.flags().aliveAwakeMobileUnbound(mob,true))
{
if(!CMLib.coffeeShops().ignoreIfNecessary(msg.source(),finalIgnoreMask(),this))
return false;
if((msg.targetMinor()==CMMsg.TYP_BUY)&&(msg.tool()!=null)&&(!msg.tool().okMessage(myHost,msg)))
return false;
String bidStr=parseBidString(msg.targetMessage());
if(bidStr==null)
{
CMLib.commands().postSay(this,mob,"I can't seem to do business with you.",true,false);
return false;
}
if(msg.tool().envStats().level()>msg.source().envStats().level())
{
CMLib.commands().postSay(this,msg.source(),"That's too advanced for you, I'm afraid.",true,false);
return false;
}
String itemName=msg.tool().name();
if((((Item)msg.tool()).expirationDate()>0)&&(((Item)msg.tool()).expirationDate()<1000))
itemName+="."+((Item)msg.tool()).expirationDate();
AuctionData data=CMLib.coffeeShops().getEnumeratedAuction(itemName, auctionHouse());
if(data==null) data=CMLib.coffeeShops().getEnumeratedAuction(msg.tool().name(), auctionHouse());
if(data==null)
{
CMLib.commands().postSay(this,mob,"That's not up for auction.",true,false);
return false;
}
if(data.auctioningM==msg.source())
{
Auctioneer.AuctionRates rates=new Auctioneer.AuctionRates(this);
if((rates.minDays>0)&&(rates.minDays>=data.daysEllapsed(mob,this)))
{
CMLib.commands().postSay(this,mob,"You may not close this auction until it has been active for "+rates.minDays+" days.",true,false);
return false;
}
if(msg.source().session()!=null)
{
try{
if(!msg.source().session().confirm("This will cancel your auction on "+data.auctioningI.name()+", are you sure (y/N)?","N",10000))
return false;
}catch(Exception e){return false;}
}
}
else
if(System.currentTimeMillis()>=data.tickDown)
{
if(data.highBidderM==msg.source())
{
CMLib.commands().postSay(this,mob,"You have won this auction -- use the BUY command to complete the transaction.",true,false);
return false;
}
CMLib.commands().postSay(this,mob,"That auction is closed.",true,false);
return false;
}
else
{
Object[] bidAmts=CMLib.english().parseMoneyStringSDL(mob,bidStr,data.currency);
String myCurrency=(String)bidAmts[0];
double myDenomination=((Double)bidAmts[1]).doubleValue();
long myCoins=((Long)bidAmts[2]).longValue();
double bid=CMath.mul(myCoins,myDenomination);
if(!myCurrency.equals(data.currency))
{
String currencyName=CMLib.beanCounter().getDenominationName(data.currency);
CMLib.commands().postSay(this,mob,"This auction is being handled in "+currencyName+".",true,false);
return false;
}
if(CMLib.beanCounter().getTotalAbsoluteValue(mob,data.currency)<bid)
{
String currencyName=CMLib.beanCounter().getDenominationName(data.currency);
CMLib.commands().postSay(this,mob,"You don't have enough "+currencyName+" on hand to cover your bid.",true,false);
return false;
}
}
return super.okMessage(myHost, msg);
}
return false;
case CMMsg.TYP_BUY:
if(CMLib.flags().aliveAwakeMobileUnbound(mob,true))
{
if(!CMLib.coffeeShops().ignoreIfNecessary(msg.source(),finalIgnoreMask(),this))
return false;
if((msg.targetMinor()==CMMsg.TYP_BUY)&&(msg.tool()!=null)&&(!msg.tool().okMessage(myHost,msg)))
return false;
if(msg.tool().envStats().level()>msg.source().envStats().level())
{
CMLib.commands().postSay(this,msg.source(),"That's too advanced for you, I'm afraid.",true,false);
return false;
}
String itemName=msg.tool().name();
if((((Item)msg.tool()).expirationDate()>0)&&(((Item)msg.tool()).expirationDate()<1000))
itemName+="."+((Item)msg.tool()).expirationDate();
AuctionData data=CMLib.coffeeShops().getEnumeratedAuction(itemName, auctionHouse());
if(data==null) data=CMLib.coffeeShops().getEnumeratedAuction(msg.tool().name(), auctionHouse());
if(data==null)
{
CMLib.commands().postSay(this,mob,"That's not up for auction.",true,false);
return false;
}
else
if(data.auctioningM==msg.source())
{
Auctioneer.AuctionRates rates=new Auctioneer.AuctionRates(this);
if((rates.minDays>0)&&(rates.minDays>=data.daysEllapsed(mob,this)))
{
CMLib.commands().postSay(this,mob,"You may not close this auction until it has been active for "+rates.minDays+" days.",true,false);
return false;
}
if(msg.source().session()!=null)
{
try{
if(!msg.source().session().confirm("This will cancel your auction on "+data.auctioningI.name()+", are you sure (y/N)?","N",10000))
return false;
}catch(Exception e){return false;}
}
}
else
if(System.currentTimeMillis()>=data.tickDown)
{
if(data.highBidderM==msg.source())
{
}
else
{
CMLib.commands().postSay(this,mob,"That auction is closed.",true,false);
return false;
}
}
else
{
if(data.buyOutPrice<=0.0)
{
CMLib.commands().postSay(this,mob,"You'll have to BID on that. BUY is not available for that particular item.",true,false);
return false;
}
else
if(CMLib.beanCounter().getTotalAbsoluteValue(mob,data.currency)<data.buyOutPrice)
{
String currencyName=CMLib.beanCounter().getDenominationName(data.currency);
CMLib.commands().postSay(this,mob,"You don't have enough "+currencyName+" on hand to buy that.",true,false);
return false;
}
}
return super.okMessage(myHost, msg);
}
return false;
case CMMsg.TYP_VALUE:
if(CMLib.flags().aliveAwakeMobileUnbound(mob,true))
{
if(!CMLib.coffeeShops().ignoreIfNecessary(msg.source(),finalIgnoreMask(),this))
return false;
return super.okMessage(myHost, msg);
}
return false;
case CMMsg.TYP_VIEW:
if(CMLib.flags().aliveAwakeMobileUnbound(mob,true))
{
if(!CMLib.coffeeShops().ignoreIfNecessary(msg.source(),finalIgnoreMask(),this))
return false;
return super.okMessage(myHost, msg);
}
return false;
default:
break;
}
}
return super.okMessage(myHost,msg);
}
/**
 * Carries out auction-house actions once okMessage() has approved them.
 * GIVE/SELL opens a new auction (prompting the seller for prices), BUY
 * handles self-cancels, winner claims and buy-outs, BID places or updates
 * a bid, VALUE/VIEW report on items, and LIST shows the auction inventory.
 *
 * @param myHost the host of this auctioneer receiving the message
 * @param msg    the message describing the action and its actors
 */
public void executeMsg(Environmental myHost, CMMsg msg)
{
    MOB mob=msg.source();
    if(msg.amITarget(this))
    {
        switch(msg.targetMinor())
        {
        case CMMsg.TYP_GIVE:
        case CMMsg.TYP_SELL:
            if(CMLib.flags().aliveAwakeMobileUnbound(mob,true))
            {
                // lastMsgData was staged during okMessage(); it must belong to
                // this (non-monster, prompt-capable) seller or the hand-off is out of sync.
                AuctionData thisData=lastMsgData;
                if((thisData==null)||(thisData.auctioningM!=msg.source())||(msg.source().isMonster()))
                {
                    lastMsgData=null;
                    CMLib.commands().postSay(this,mob,"I'm confused. Please try to SELL again.",true,false);
                }
                else
                try{
                    double lowestDenom=CMLib.beanCounter().getLowestDenomination(thisData.currency);
                    // BUGFIX: prompts previously read "(in <denomination>?" -- closing paren added.
                    CMLib.commands().postSay(this,mob,"What would you like your opening price to be (in "+CMLib.beanCounter().getDenominationName(thisData.currency, lowestDenom)+")?",true,false);
                    String openPrice=mob.session().prompt(": ",30000);
                    CMLib.commands().postSay(this,mob,"What would you like your buy-now price to be (in "+CMLib.beanCounter().getDenominationName(thisData.currency, lowestDenom)+")?",true,false);
                    String buyPrice=mob.session().prompt(": ",30000);
                    // Prices are entered in the currency's lowest denomination.
                    thisData.bid=CMath.s_double(openPrice)*lowestDenom;
                    if(thisData.bid<0.0) thisData.bid=0.0;
                    thisData.buyOutPrice=CMath.s_double(buyPrice)*lowestDenom;
                    if(thisData.buyOutPrice<=0.0) thisData.buyOutPrice=-1.0; // <=0 means no buy-out offered
                    thisData.start=System.currentTimeMillis();
                    CMLib.coffeeShops().saveAuction(thisData, auctionHouse(),false);
                    CMLib.commands().postChannel(this,"AUCTION","New "+thisData.daysRemaining(thisData.auctioningM,msg.source())+" day auction: "+thisData.auctioningI.name(),true);
                    AuctionRates aRates=new AuctionRates(this);
                    // Listing deposit = flat listing fee + pct of (days listed * item base value).
                    double deposit=aRates.timeListPrice;
                    deposit+=(aRates.timeListPct*((double)CMath.mul(thisData.daysRemaining(mob,this),thisData.auctioningI.baseGoldValue())));
                    CMLib.beanCounter().subtractMoney(mob,deposit);
                    // BUGFIX: announce before destroying the item -- the old code destroyed
                    // auctioningI and then read its name() for this confirmation message.
                    CMLib.commands().postSay(this,mob,"Your auction for "+thisData.auctioningI.name()+" is now open. When it is done, you will receive either your winnings automatically, or the returned item automatically.",true,false);
                    thisData.auctioningI.destroy();
                }catch(Exception e){} // best-effort: a dropped session or prompt timeout simply aborts the listing
            }
            super.executeMsg(myHost,msg);
            break;
        case CMMsg.TYP_BUY:
            super.executeMsg(myHost,msg);
            if(CMLib.flags().aliveAwakeMobileUnbound(mob,true))
            {
                if(msg.tool() instanceof Item)
                {
                    // Enumerated auctions for short-lived items are keyed "name.expiration".
                    String itemName=msg.tool().name();
                    if((((Item)msg.tool()).expirationDate()>0)&&(((Item)msg.tool()).expirationDate()<1000))
                        itemName+="."+((Item)msg.tool()).expirationDate();
                    AuctionData data=CMLib.coffeeShops().getEnumeratedAuction(itemName, auctionHouse());
                    if(data==null) data=CMLib.coffeeShops().getEnumeratedAuction(msg.tool().name(), auctionHouse());
                    if(data==null)
                        CMLib.commands().postSay(this,mob,"That's not up for auction.",true,false);
                    else
                    if(data.auctioningM==mob)
                    {
                        // The seller "buying" their own lot cancels the auction.
                        CMLib.coffeeShops().cancelAuction(data);
                        CMLib.commands().postSay(this,mob,"Your auction for "+data.auctioningI.name()+" has been canceled.",true,false);
                    }
                    else
                    if((System.currentTimeMillis()>=data.tickDown)&&(data.highBidderM==mob))
                    {
                        // Closed auction, winner claiming: pay the seller (minus house cut),
                        // refund the winner's overbid, and hand over the item.
                        // NOTE(review): other paths construct AuctionRates(this); confirm the
                        // no-arg constructor yields the same rates.
                        Auctioneer.AuctionRates aRates=new Auctioneer.AuctionRates();
                        double houseCut=Math.floor(data.bid*aRates.timeCutPct);
                        double finalAmount=data.bid-houseCut;
                        CMLib.coffeeShops().returnMoney(data.auctioningM,data.currency,finalAmount);
                        CMLib.coffeeShops().auctionNotify(data.auctioningM,data.highBidderM.Name()+", who won your auction for "+data.auctioningI.name()+" has claimed "+data.highBidderM.charStats().hisher()+" property. You have been credited with "+CMLib.beanCounter().nameCurrencyShort(data.currency,finalAmount)+", after the house took a cut of "+CMLib.beanCounter().nameCurrencyShort(data.currency,houseCut)+".",data.auctioningI.Name());
                        if((data.highBid-data.bid)>0.0)
                            CMLib.commands().postSay(this,mob,"Congratulations, and here is your "+CMLib.beanCounter().nameCurrencyShort(data.currency,data.highBid-data.bid)+" in change as well.",true,false);
                        else
                            CMLib.commands().postSay(this,mob,"Congratulations!",true,false);
                        // Return the difference between the winner's max bid and the winning bid.
                        CMLib.coffeeShops().returnMoney(mob,data.currency,data.highBid-data.bid);
                        CMLib.coffeeShops().purchaseItems(data.auctioningI,CMParms.makeVector(data.auctioningI),this,mob);
                        if(!CMath.bset(msg.targetCode(),CMMsg.MASK_OPTIMIZE))
                            mob.location().recoverRoomStats();
                        CMLib.database().DBDeleteJournal(data.auctionDBKey);
                    }
                    else
                    if(System.currentTimeMillis()<data.tickDown)
                    {
                        // Open auction, buy-out: charge the full buy-out price immediately.
                        Auctioneer.AuctionRates aRates=new Auctioneer.AuctionRates();
                        double houseCut=Math.floor(data.buyOutPrice*aRates.timeCutPct);
                        double finalAmount=data.buyOutPrice-houseCut;
                        CMLib.coffeeShops().returnMoney(data.auctioningM,data.currency,finalAmount);
                        CMLib.coffeeShops().auctionNotify(data.auctioningM,"Your auction for "+data.auctioningI.name()+" sold to "+mob.Name()+" for "+CMLib.beanCounter().nameCurrencyShort(data.currency,data.buyOutPrice)+", after the house took a cut of "+CMLib.beanCounter().nameCurrencyShort(data.currency,houseCut)+".",data.auctioningI.Name());
                        CMLib.beanCounter().subtractMoney(mob,data.currency,data.buyOutPrice);
                        CMLib.coffeeShops().purchaseItems(data.auctioningI,CMParms.makeVector(data.auctioningI),this,mob);
                        CMLib.database().DBDeleteJournal(data.auctionDBKey);
                    }
                }
                else
                    CMLib.commands().postSay(this,mob,"I can't seem to auction "+msg.tool().name()+".",true,false);
            }
            break;
        case CMMsg.TYP_BID:
            super.executeMsg(myHost,msg);
            if(CMLib.flags().aliveAwakeMobileUnbound(mob,true))
            {
                if(msg.tool() instanceof Item)
                {
                    String itemName=msg.tool().name();
                    if((((Item)msg.tool()).expirationDate()>0)&&(((Item)msg.tool()).expirationDate()<1000))
                        itemName+="."+((Item)msg.tool()).expirationDate();
                    AuctionData data=CMLib.coffeeShops().getEnumeratedAuction(itemName, auctionHouse());
                    if(data==null) data=CMLib.coffeeShops().getEnumeratedAuction(msg.tool().name(), auctionHouse());
                    if(data==null)
                        CMLib.commands().postSay(this,mob,"That's not up for auction.",true,false);
                    else
                    if(data.auctioningM==mob)
                    {
                        // The seller bidding on their own lot cancels the auction.
                        CMLib.coffeeShops().cancelAuction(data);
                        CMLib.commands().postSay(this,mob,"Your auction for "+data.auctioningI.name()+" has been canceled.",true,false);
                    }
                    else
                    {
                        String bidStr=parseBidString(msg.targetMessage());
                        if(bidStr==null)
                        {
                            CMLib.commands().postSay(this,mob,"I can't seem to do business with you.",true,false);
                            return;
                        }
                        // Parse the bid into (currency name, denomination value, coin count).
                        Object[] bidAmts=CMLib.english().parseMoneyStringSDL(mob,bidStr,data.currency);
                        String myCurrency=(String)bidAmts[0];
                        double myDenomination=((Double)bidAmts[1]).doubleValue();
                        long myCoins=((Long)bidAmts[2]).longValue();
                        double bid=CMath.mul(myCoins,myDenomination);
                        MOB M=data.highBidderM; // previous high bidder, notified if outbid
                        double oldBid=data.bid;
                        double oldMaxBid=data.highBid;
                        String[] resp=CMLib.coffeeShops().bid(mob, bid, myCurrency,data, data.auctioningI,new Vector());
                        if(resp!=null)
                        {
                            if(resp[0]!=null)
                                mob.tell(resp[0]);
                            if((resp[1]!=null)&&(M!=null))
                                CMLib.coffeeShops().auctionNotify(M,resp[1],data.auctioningI.name());
                        }
                        // Persist only when the bid actually changed the auction state.
                        if((oldBid!=data.bid)||(oldMaxBid!=data.highBid))
                            CMLib.coffeeShops().saveAuction(data, auctionHouse(),true);
                    }
                }
                else
                    CMLib.commands().postSay(this,mob,"I can't seem to auction "+msg.tool().name()+".",true,false);
            }
            break;
        case CMMsg.TYP_VALUE:
            super.executeMsg(myHost,msg);
            if(CMLib.flags().aliveAwakeMobileUnbound(mob,true))
            {
                // Auctioneers never appraise -- the market decides.
                CMLib.commands().postSay(this,mob,"That's for the people to decide. Why don't you use the SELL command and see what you can get?",true,false);
                return;
            }
            break;
        case CMMsg.TYP_VIEW:
            super.executeMsg(myHost,msg);
            if(msg.tool() instanceof Item)
            {
                String itemName=msg.tool().name();
                if((((Item)msg.tool()).expirationDate()>0)&&(((Item)msg.tool()).expirationDate()<1000))
                    itemName+="."+((Item)msg.tool()).expirationDate();
                AuctionData data=CMLib.coffeeShops().getEnumeratedAuction(itemName, auctionHouse());
                if(data==null) data=CMLib.coffeeShops().getEnumeratedAuction(msg.tool().name(), auctionHouse());
                if(data==null)
                    CMLib.commands().postSay(this,mob,"That's not up for auction.",true,false);
                else
                {
                    String price=CMLib.beanCounter().nameCurrencyShort(data.currency,data.bid);
                    String buyOut=(data.buyOutPrice<=0.0)?null:CMLib.beanCounter().nameCurrencyShort(data.currency,data.buyOutPrice);
                    StringBuffer str=new StringBuffer(CMLib.coffeeShops().getViewDescription(msg.tool())+"\n\r\n\rThe current bid on "+msg.tool().name()+" is "+price+". Use the BID command to place your own bid. ");
                    if(buyOut!=null) str.append("You may also buy this item immediately for "+buyOut+" by using the BUY command.");
                    CMLib.commands().postSay(this,mob,str.toString(),true,false);
                }
            }
            break;
        case CMMsg.TYP_LIST:
        {
            super.executeMsg(myHost,msg);
            if(CMLib.flags().aliveAwakeMobileUnbound(mob,true))
            {
                // Optional mask after LIST filters the inventory shown.
                String forMask=CMLib.coffeeShops().getListForMask(msg.targetMessage());
                String s=CMLib.coffeeShops().getAuctionInventory(this,mob,this,forMask);
                if(s.length()>0) mob.tell(s);
            }
            return;
        }
        default:
            super.executeMsg(myHost,msg);
            break;
        }
    }
    else
        super.executeMsg(myHost,msg);
}
/** @return the shopkeeper summary string for this mob's shop, as built by the coffee shop library. */
public String storeKeeperString(){return CMLib.coffeeShops().storeKeeperString(getShop());}
/** @return true if this shopkeeper deals in thisThang, as decided by the coffee shop library. */
public boolean doISellThis(Environmental thisThang){return CMLib.coffeeShops().doISellThis(thisThang,this);}
/**
 * Finds a reference Area used for fall-back shop settings: the mob's
 * recorded start area, else the area of its current location, else any
 * area known to the map library.
 *
 * @return the best available Area for this auctioneer, never null if any area exists
 */
protected Area getStartArea(){
    Area A=CMLib.map().getStartArea(this);
    // BUGFIX: the areaLocation() result was previously computed and discarded,
    // making this fallback a no-op.
    if(A==null) A=CMLib.map().areaLocation(this);
    if(A==null) A=(Area)CMLib.map().areas().nextElement();
    return A;
}
/**
 * @return the pricing prejudice factors in effect: this auctioneer's own
 *         when set, otherwise the start area's.
 */
public String finalPrejudiceFactors(){
    final String localFactors=prejudiceFactors();
    return (localFactors.length()>0) ? localFactors : getStartArea().finalPrejudiceFactors();
}
/** @return this auctioneer's own prejudice factors, stored compressed in miscText. */
public String prejudiceFactors(){return CMLib.encoder().decompressString(miscText);}
/** Stores the given prejudice factors, compressed, into miscText. */
public void setPrejudiceFactors(String factors){miscText=CMLib.encoder().compressString(factors);}
/**
 * @return the customer ignore mask in effect: this auctioneer's own when
 *         set, otherwise the start area's.
 */
public String finalIgnoreMask(){
    final String localMask=ignoreMask();
    return (localMask.length()>0) ? localMask : getStartArea().finalIgnoreMask();
}
/** @return this auctioneer's own ignore mask; always empty here, so finalIgnoreMask() defers to the area. */
public String ignoreMask(){return "";}
/** No-op: this auctioneer does not store a local ignore mask. */
public void setIgnoreMask(String factors){}
/**
 * @return the item pricing adjustments in effect: this auctioneer's own
 *         when any are set, otherwise the start area's.
 */
public String[] finalItemPricingAdjustments(){
    final String[] localAdjustments=itemPricingAdjustments();
    if((localAdjustments!=null)&&(localAdjustments.length>0))
        return localAdjustments;
    return getStartArea().finalItemPricingAdjustments();
}
/** @return this auctioneer's own pricing adjustments; always empty here, so the area's apply. */
public String[] itemPricingAdjustments(){ return new String[0];}
/** No-op: this auctioneer does not store local item pricing adjustments. */
public void setItemPricingAdjustments(String[] factors){}
/**
 * @return the shop budget in effect: this auctioneer's own when set,
 *         otherwise the start area's.
 */
public String finalBudget(){
    final String localBudget=budget();
    return (localBudget.length()>0) ? localBudget : getStartArea().finalBudget();
}
/** @return this auctioneer's own budget setting; always empty here, so finalBudget() defers to the area. */
public String budget(){return "";}
/** No-op: this auctioneer does not store a local budget. */
public void setBudget(String factors){}
/**
 * @return the devalue rate in effect: this auctioneer's own when set,
 *         otherwise the start area's.
 */
public String finalDevalueRate(){
    final String localRate=devalueRate();
    return (localRate.length()>0) ? localRate : getStartArea().finalDevalueRate();
}
/** @return this auctioneer's own devalue rate; always empty here, so finalDevalueRate() defers to the area. */
public String devalueRate(){return "";}
/** No-op: this auctioneer does not store a local devalue rate. */
public void setDevalueRate(String factors){}
/**
 * @return the inventory reset rate (in ticks) in effect: this auctioneer's
 *         own when non-zero, otherwise the start area's.
 */
public int finalInvResetRate(){
    final int localRate=invResetRate();
    return (localRate!=0) ? localRate : getStartArea().finalInvResetRate();
}
/** @return this auctioneer's own reset rate; always 0 here, so finalInvResetRate() defers to the area. */
public int invResetRate(){return 0;}
/** No-op: this auctioneer does not store a local inventory reset rate. */
public void setInvResetRate(int ticks){}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.orm.entities;
import static org.apache.commons.lang.StringUtils.defaultString;
import java.util.Objects;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
import javax.persistence.TableGenerator;
import org.apache.ambari.server.state.RepositoryVersionState;
@Table(name = "cluster_version")
@Entity
@TableGenerator(name = "cluster_version_id_generator",
table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "cluster_version_id_seq"
, initialValue = 0
)
@NamedQueries({
@NamedQuery(name = "clusterVersionByClusterAndStackAndVersion", query =
"SELECT clusterVersion FROM ClusterVersionEntity clusterVersion JOIN clusterVersion.clusterEntity cluster " +
"WHERE cluster.clusterName=:clusterName AND clusterVersion.repositoryVersion.stack.stackName=:stackName AND clusterVersion.repositoryVersion.stack.stackVersion=:stackVersion AND clusterVersion.repositoryVersion.version=:version"),
@NamedQuery(name = "clusterVersionByClusterAndState", query =
"SELECT clusterVersion FROM ClusterVersionEntity clusterVersion JOIN clusterVersion.clusterEntity cluster " +
"WHERE cluster.clusterName=:clusterName AND clusterVersion.state=:state"),
@NamedQuery(name = "clusterVersionByCluster", query =
"SELECT clusterVersion FROM ClusterVersionEntity clusterVersion JOIN clusterVersion.clusterEntity cluster " +
"WHERE cluster.clusterName=:clusterName"),
@NamedQuery(name = "clusterVersionByStackVersion", query = "SELECT clusterVersion FROM ClusterVersionEntity clusterVersion WHERE clusterVersion.repositoryVersion.stack.stackName=:stackName AND clusterVersion.repositoryVersion.stack.stackVersion=:stackVersion AND clusterVersion.repositoryVersion.version=:version"),
})
/**
 * JPA entity for a row of the {@code cluster_version} table: the state of a
 * particular repository version for a cluster, with the time span over which
 * that state was reached and the user who performed the action.
 */
public class ClusterVersionEntity {

  @Id
  @Column(name = "id", nullable = false, insertable = true, updatable = false)
  @GeneratedValue(strategy = GenerationType.TABLE, generator = "cluster_version_id_generator")
  private Long id;

  // Read-only mapping; the column value is managed through the clusterEntity relation below.
  @Column(name = "cluster_id", nullable = false, insertable = false, updatable = false)
  private Long clusterId;

  @ManyToOne
  @JoinColumn(name = "cluster_id", referencedColumnName = "cluster_id", nullable = false)
  private ClusterEntity clusterEntity;

  @ManyToOne
  @JoinColumn(name = "repo_version_id", referencedColumnName = "repo_version_id", nullable = false)
  private RepositoryVersionEntity repositoryVersion;

  @Column(name = "state", nullable = false, insertable = true, updatable = true)
  @Enumerated(value = EnumType.STRING)
  private RepositoryVersionState state = RepositoryVersionState.CURRENT;

  @Basic
  @Column(name = "start_time", nullable = false, insertable = true, updatable = true)
  private Long startTime = System.currentTimeMillis();

  // Nullable in the schema: only set once the version finalizes its state.
  @Basic
  @Column(name = "end_time", insertable = true, updatable = true)
  private Long endTime;

  @Basic
  @Column(name = "user_name", insertable = true, updatable = true)
  private String userName = "";

  /**
   * Empty constructor primarily used by unit tests.
   */
  public ClusterVersionEntity() {
  }

  /**
   * Full constructor that doesn't have the endTime
   * @param cluster Cluster entity
   * @param repositoryVersion repository version
   * @param state Cluster version state
   * @param startTime Time the cluster version reached its first state
   * @param userName User who performed the action
   */
  public ClusterVersionEntity(ClusterEntity cluster, RepositoryVersionEntity repositoryVersion, RepositoryVersionState state, long startTime, String userName) {
    clusterId = cluster.getClusterId();
    this.repositoryVersion = repositoryVersion;
    clusterEntity = cluster;
    this.state = state;
    this.startTime = startTime;
    this.userName = userName;
  }

  /**
   * Full constructor that does have the endTime
   * @param cluster Cluster entity
   * @param repositoryVersion repository version
   * @param state Cluster version state
   * @param startTime Time the cluster version reached its first state
   * @param endTime Time the cluster version finalized its state
   * @param userName User who performed the action
   */
  public ClusterVersionEntity(ClusterEntity cluster, RepositoryVersionEntity repositoryVersion, RepositoryVersionState state, long startTime, long endTime, String userName) {
    this(cluster, repositoryVersion, state, startTime, userName);
    this.endTime = endTime;
  }

  public Long getId() {
    return id;
  }

  public void setId(Long id) {
    this.id = id;
  }

  public Long getClusterId() {
    return clusterId;
  }

  public void setClusterId(Long clusterId) {
    this.clusterId = clusterId;
  }

  public ClusterEntity getClusterEntity() {
    return clusterEntity;
  }

  public void setClusterEntity(ClusterEntity clusterEntity) {
    this.clusterEntity = clusterEntity;
  }

  public RepositoryVersionState getState() {
    return state;
  }

  public void setState(RepositoryVersionState state) {
    this.state = state;
  }

  public Long getStartTime() { return startTime; }

  public void setStartTime(Long startTime) { this.startTime = startTime; }

  public Long getEndTime() { return endTime; }

  public void setEndTime(Long endTime) { this.endTime = endTime; }

  /** @return the user name, never null (empty string when unset). */
  public String getUserName() { return defaultString(userName); }

  public void setUserName(String userName) { this.userName = userName; }

  public void setRepositoryVersion(RepositoryVersionEntity repositoryVersion) {
    this.repositoryVersion = repositoryVersion;
  }

  public RepositoryVersionEntity getRepositoryVersion() {
    return repositoryVersion;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ClusterVersionEntity that = (ClusterVersionEntity) o;
    // BUGFIX: id/clusterId are boxed Longs, so the previous "!=" comparison
    // checked references, not values; endTime (nullable) and
    // repositoryVersion could also NPE. Objects.equals handles both.
    return Objects.equals(id, that.id)
        && Objects.equals(clusterId, that.clusterId)
        && Objects.equals(repositoryVersion, that.repositoryVersion)
        && Objects.equals(state, that.state)
        && Objects.equals(startTime, that.startTime)
        && Objects.equals(endTime, that.endTime)
        && Objects.equals(userName, that.userName);
  }

  @Override
  public int hashCode() {
    // Same fields as equals(), null-safe, keeping the contract consistent.
    return Objects.hash(id, clusterId, repositoryVersion, state, startTime, endTime, userName);
  }
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.internal.impl.sql.execute;
import com.pivotal.gemfirexd.internal.catalog.UUID;
import com.pivotal.gemfirexd.internal.engine.distributed.ResultHolder;
import com.pivotal.gemfirexd.internal.engine.distributed.message.StatementExecutorMessage;
import com.pivotal.gemfirexd.internal.engine.procedure.cohort.OutgoingResultSetImpl;
import com.pivotal.gemfirexd.internal.engine.procedure.coordinate.ProcedureProcessorResultSet;
import com.pivotal.gemfirexd.internal.engine.sql.execute.AbstractGemFireResultSet;
import com.pivotal.gemfirexd.internal.engine.sql.execute.GemFireDeleteResultSet;
import com.pivotal.gemfirexd.internal.engine.sql.execute.GemFireDistributedResultSet;
import com.pivotal.gemfirexd.internal.engine.sql.execute.GemFireInsertResultSet;
import com.pivotal.gemfirexd.internal.engine.sql.execute.GemFireRegionSizeResultSet;
import com.pivotal.gemfirexd.internal.engine.sql.execute.GemFireResultSet;
import com.pivotal.gemfirexd.internal.engine.sql.execute.GemFireUpdateResultSet;
import com.pivotal.gemfirexd.internal.engine.sql.execute.GfxdStatisticsVisitor;
import com.pivotal.gemfirexd.internal.engine.sql.execute.GfxdSubqueryResultSet;
import com.pivotal.gemfirexd.internal.engine.sql.execute.NcjPullResultSet;
import com.pivotal.gemfirexd.internal.iapi.error.StandardException;
import com.pivotal.gemfirexd.internal.iapi.sql.Activation;
import com.pivotal.gemfirexd.internal.iapi.sql.ResultSet;
import com.pivotal.gemfirexd.internal.impl.jdbc.EmbedConnection;
import com.pivotal.gemfirexd.internal.impl.jdbc.EmbedStatement;
/**
* Visitor to collect <tt>ResultSet</tt> statistics from each class. Multiple
* visit overloads is the tradeoff between below explanation and code
* maintainability.
* <p>
* Every leaf classes of ResultSet hierarchy has overloaded version of visit
* method, so that every class is handled individually.
* <p>
* This is different from Derby's original model of <tt>RuntimeStatistics</tt>
* and <tt>ResultSetStatistics</tt> in terms of (a) temporary object creations
* (b) multiple checks of <tt>instanceof</tt> & dynamic cast. This also avoids
* one monolithic function handling for every type of <tt>ResultSet</tt>.
* <p>
*
* Guideline to maintain this class:
* <ul>
* <li>Any <tt><b>concrete</b></tt> class implementing one of the child
* interfaces of <tt>ResultSet</tt> should get added like in the <i>LEAF classes
* section</i>.
* <li>Any <tt><b>abstract</b></tt> class implementing one of the child
* interfaces of <tt>ResultSet</tt> should get added like in the <i>ROOT classes
* section</i>.
* <li>Any class <tt><b>extending</b></tt> existing more than one level deep
* from <tt>ResultSet</tt> should get added like in <i>GRANDCHILD classes
* section</i>.
* </ul>
*
* @author soubhikc
*
*/
/**
 * Visitor used to collect <tt>ResultSet</tt> statistics from each concrete
 * class in the hierarchy. Every concrete class gets its own <tt>visit</tt>
 * overload so each type is handled individually, avoiding the temporary
 * object creation, repeated <tt>instanceof</tt> checks and dynamic casts of
 * Derby's original <tt>RuntimeStatistics</tt>/<tt>ResultSetStatistics</tt>
 * model, as well as one monolithic handler for every result set type.
 * <p>
 * Guideline to maintain this interface:
 * <ul>
 * <li>any <tt><b>concrete</b></tt> class implementing one of the child
 * interfaces of <tt>ResultSet</tt> is added to the <i>LEAF classes
 * section</i>;</li>
 * <li>any <tt><b>abstract</b></tt> class implementing one of those
 * interfaces is added to the <i>ROOT classes section</i>;</li>
 * <li>any class <tt><b>extending</b></tt> more than one level below
 * <tt>ResultSet</tt> is added to the <i>GRANDCHILD classes section</i>.</li>
 * </ul>
 *
 * @author soubhikc
 */
public interface ResultSetStatisticsVisitor extends GfxdStatisticsVisitor {

  // ------------------- XPLAINVisitor methods -------------------

  /**
   * Resets the visitor for a new run over the statistics. A default
   * implementation should call this automatically at the start of
   * {@link #doXPLAIN}.
   */
  void clear();

  /**
   * Hook method called from the top result set: starts the explanation of
   * the current statistics tree and keeps the gathered information for one
   * explain run.
   *
   * @param rs the result set tree being explained
   * @param activation the activation that produced the result set
   * @param genStatementDesc whether to generate a statement entry
   * @param timeStatsEnabled whether timing stats were enabled on the query
   *          node during collection (#44201)
   * @param isLocallyExecuted whether the statement executed locally
   */
  void doXPLAIN(ResultSet rs, Activation activation, boolean genStatementDesc,
      boolean timeStatsEnabled, boolean isLocallyExecuted) throws StandardException;

  /**
   * Invoked on the data nodes a query gets routed to, to capture remote
   * query plans from a result holder.
   *
   * @param conn the connection the query executed on
   * @param msg the executor message being processed
   * @param rh the result holder holding onto the result set
   * @param isLocallyExecuted whether to generate a statement entry
   * @param <T> the executor message's result type
   * @throws StandardException on failure to record the plan
   */
  <T> void process(EmbedConnection conn, StatementExecutorMessage<T> msg,
      ResultHolder rh, boolean isLocallyExecuted) throws StandardException;

  /**
   * Invoked on the data nodes a query gets routed to, to capture remote
   * query plans from an embedded statement.
   *
   * @param conn the connection the query executed on
   * @param msg the executor message being processed
   * @param est the embedded statement that executed
   * @param isLocallyExecuted whether to generate a statement entry
   * @param <T> the executor message's result type
   * @throws StandardException on failure to record the plan
   */
  <T> void process(EmbedConnection conn, StatementExecutorMessage<T> msg,
      EmbedStatement est, boolean isLocallyExecuted) throws StandardException;

  /**
   * Informs the visitor how many children the current node has. Each node
   * must call this first, before its visit method is invoked. Since the
   * traversal is top-down, pre-order and depth-first, the visitor can use
   * this to relate the current node to the nodes above it.
   *
   * @param noChildren the number of children of the node being explained
   */
  void setNumberOfChildren(int noChildren);

  /**
   * @return a blank copy of this visitor, i.e. a new statistics collector
   *         chain
   */
  ResultSetStatisticsVisitor getClone();

  /**
   * @return the statement plan UUID that captured the current execution, or
   *         null when explain_connection is off; otherwise usable to
   *         extract the query plan
   */
  UUID getStatementUUID();

  // ------------------- additional helper methods -------------------

  /** @return the next statistics collector in the chain */
  ResultSetStatisticsVisitor getNextCollector();

  /** Sets the next statistics collector in the chain. */
  void setNextCollector(ResultSetStatisticsVisitor collector);

  // ------------------- ROOT classes section -------------------
  // Abstract bases of the hierarchy. These overloads are not allowed for
  // override -- the purpose of this interface is otherwise lost; the extra
  // int argument marks them as such.

  // basic derby hierarchy
  void visit(ResultSet rs, int donotHonor);
  void visit(NoRowsResultSetImpl rs, int donotHonor);
  void visit(BasicNoPutResultSetImpl rs, int donotHonor);
  void visit(DMLWriteResultSet rs, int donotHonor);
  void visit(DMLVTIResultSet rs, int donotHonor);
  // distribution hierarchy
  void visit(AbstractGemFireResultSet rs, int donotHonor);
  // NoPutResultSet
  void visit(ScanResultSet rs, int donotHonor);

  // ------------------- GRANDCHILD classes section -------------------
  // The following kinds are taken care of by their parents.

  void visit(DeleteCascadeResultSet rs);
  void visit(GemFireDeleteResultSet rs);
  void visit(DistinctGroupedAggregateResultSet rs);
  void visit(DistinctScanResultSet rs);
  void visit(MergeJoinResultSet rs);
  void visit(NestedLoopJoinResultSet rs);
  void visit(HashJoinResultSet rs);
  void visit(NestedLoopLeftOuterJoinResultSet rs);
  void visit(HashLeftOuterJoinResultSet rs);
  void visit(DistinctScalarAggregateResultSet rs);
  void visit(BulkTableScanResultSet rs);
  void visit(MultiProbeTableScanResultSet rs);

  // ------------------- LEAF classes section -------------------
  // The following are leaves in some sense and are profiled.

  // NoRowsResultSetImpl hierarchy
  void visit(CallStatementResultSet rs);
  void visit(DeleteResultSet rs); // hasChildren
  void visit(DeleteVTIResultSet rs);
  void visit(InsertVTIResultSet rs);
  void visit(UpdateVTIResultSet rs);
  void visit(InsertResultSet rs);
  void visit(UpdateResultSet rs);

  // DAP related
  void visit(OutgoingResultSetImpl rs);

  // distribution related
  void visit(GemFireDistributedResultSet rs);
  void visit(GemFireInsertResultSet rs);
  void visit(GemFireResultSet rs);
  void visit(GemFireUpdateResultSet rs); // hasChildren

  // CursorResultSet hierarchy
  void visit(CurrentOfResultSet rs);
  void visit(DependentResultSet rs);
  void visit(GroupedAggregateResultSet rs); // hasChildren
  void visit(HashScanResultSet rs); // hasChildren
  void visit(HashTableResultSet rs);
  void visit(IndexRowToBaseRowResultSet rs);
  void visit(JoinResultSet rs);
  void visit(MaterializedResultSet rs);
  void visit(NormalizeResultSet rs);
  void visit(ProjectRestrictResultSet rs);
  void visit(RowCountResultSet rs);
  void visit(RowResultSet rs);
  void visit(ScalarAggregateResultSet rs);
  void visit(ScrollInsensitiveResultSet rs);
  void visit(SetOpResultSet rs);
  void visit(SortResultSet rs);
  void visit(TableScanResultSet rs);
  void visit(UnionResultSet rs);
  void visit(VTIResultSet rs);

  // BasicNoPutResultSetImpl hierarchy
  void visit(AnyResultSet anyResultSet);
  void visit(LastIndexKeyResultSet lastIndexKeyResultSet);
  void visit(MiscResultSet miscResultSet);
  void visit(OnceResultSet onceResultSet);
  void visit(ProcedureProcessorResultSet procedureProcessorResultSet);
  void visit(GfxdSubqueryResultSet gfxdSubqueryResultset);
  void visit(NcjPullResultSet ncjPullResultset);
  void visit(TemporaryRowHolderResultSet temporaryRowHolderResultSet);
  void visit(WindowResultSet windowResultSet);
  void visit(GemFireRegionSizeResultSet regionSizeResultSet);
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.tck.dom.scalars;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Date;
import javax.validation.constraints.Digits;
import org.apache.isis.applib.AbstractDomainObject;
import org.apache.isis.applib.annotation.MemberOrder;
import org.apache.isis.applib.annotation.ObjectType;
import org.apache.isis.applib.annotation.Optional;
import org.apache.isis.applib.annotation.Title;
@javax.jdo.annotations.PersistenceCapable
@javax.jdo.annotations.Discriminator("JDKV")
@javax.jdo.annotations.Query(
        name="jdkv_findByStringProperty", language="JDOQL",  
        value="SELECT FROM org.apache.isis.tck.dom.scalars.JdkValuedEntity WHERE stringProperty == :i")
@ObjectType("JDKV")
public class JdkValuedEntity extends AbstractDomainObject {

    // NOTE(review): the JDOQL candidate class in the @Query above names
    // "org.apache.isis.tck.dom.scalars.JdkValuedEntity" while this class lives
    // in package "org.apache.isis.core.tck.dom.scalars" -- confirm the query
    // still resolves before relying on it.

    // {{ StringProperty (also title, pk)
    private String stringProperty;

    /**
     * The primary key of this entity; also used as its title.
     *
     * @return the string property, may be null until set
     */
    @javax.jdo.annotations.PrimaryKey
    @Title
    @Optional
    @MemberOrder(sequence = "1")
    public String getStringProperty() {
        return stringProperty;
    }

    // parameter renamed from the copy-paste leftover "description" for
    // consistency with every other setter in this class
    public void setStringProperty(final String stringProperty) {
        this.stringProperty = stringProperty;
    }
    // }}

    // {{ JavaUtilDateProperty
    private Date javaUtilDateProperty;

    @Optional
    @MemberOrder(sequence = "1")
    public Date getJavaUtilDateProperty() {
        return javaUtilDateProperty;
    }

    public void setJavaUtilDateProperty(final Date javaUtilDateProperty) {
        this.javaUtilDateProperty = javaUtilDateProperty;
    }
    // }}

    // {{ JavaSqlDateProperty
    private java.sql.Date javaSqlDateProperty;

    @javax.jdo.annotations.Persistent() // since not persistent by default
    @Optional
    @MemberOrder(sequence = "1")
    public java.sql.Date getJavaSqlDateProperty() {
        return javaSqlDateProperty;
    }

    public void setJavaSqlDateProperty(final java.sql.Date javaSqlDateProperty) {
        this.javaSqlDateProperty = javaSqlDateProperty;
    }
    // }}

    // {{ JavaSqlTimeProperty (property)
    @javax.jdo.annotations.Persistent() // since not persistent by default
    private java.sql.Time javaSqlTimeProperty;

    @Optional
    @MemberOrder(sequence = "1")
    public java.sql.Time getJavaSqlTimeProperty() {
        return javaSqlTimeProperty;
    }

    public void setJavaSqlTimeProperty(final java.sql.Time javaSqlTimeProperty) {
        this.javaSqlTimeProperty = javaSqlTimeProperty;
    }
    // }}

    // {{ JavaSqlTimestampProperty
    @javax.jdo.annotations.Persistent() // since not persistent by default
    private java.sql.Timestamp javaSqlTimestampProperty;

    @Optional
    @MemberOrder(sequence = "1")
    public java.sql.Timestamp getJavaSqlTimestampProperty() {
        return javaSqlTimestampProperty;
    }

    public void setJavaSqlTimestampProperty(final java.sql.Timestamp javaSqlTimestampProperty) {
        this.javaSqlTimestampProperty = javaSqlTimestampProperty;
    }
    // }}

    // {{ BigIntegerProperty (to hold values that are larger than a long)
    private BigInteger bigIntegerProperty;

    @Optional
    @MemberOrder(sequence = "1")
    public BigInteger getBigIntegerProperty() {
        return bigIntegerProperty;
    }

    public void setBigIntegerProperty(final BigInteger bigIntegerProperty) {
        this.bigIntegerProperty = bigIntegerProperty;
    }
    // }}

    // {{ BigIntegerProperty2 (to hold values that can also fit into a long)
    private BigInteger bigIntegerProperty2;

    @Optional
    @MemberOrder(sequence = "1")
    public BigInteger getBigIntegerProperty2() {
        return bigIntegerProperty2;
    }

    public void setBigIntegerProperty2(final BigInteger bigIntegerProperty2) {
        this.bigIntegerProperty2 = bigIntegerProperty2;
    }
    // }}

    // {{ BigDecimalProperty (to hold values that are larger than a double)
    private BigDecimal bigDecimalProperty;

    @Digits(integer=20,fraction = 10) // corresponds to big-decimal(30,10)
    @Optional
    @MemberOrder(sequence = "1")
    public BigDecimal getBigDecimalProperty() {
        return bigDecimalProperty;
    }

    public void setBigDecimalProperty(final BigDecimal bigDecimalProperty) {
        this.bigDecimalProperty = bigDecimalProperty;
    }
    // }}

    // {{ BigDecimalProperty2 (to hold values that can also fit into a double)
    private BigDecimal bigDecimalProperty2;

    @Optional
    @MemberOrder(sequence = "1")
    public BigDecimal getBigDecimalProperty2() {
        return bigDecimalProperty2;
    }

    public void setBigDecimalProperty2(final BigDecimal bigDecimalProperty2) {
        this.bigDecimalProperty2 = bigDecimalProperty2;
    }
    // }}

    // {{ MyEnum (property)
    private MyEnum myEnum;

    @javax.jdo.annotations.Persistent
    @Optional
    @MemberOrder(sequence = "1")
    public MyEnum getMyEnum() {
        return myEnum;
    }

    public void setMyEnum(final MyEnum myEnum) {
        this.myEnum = myEnum;
    }
    // }}
}
| |
/*
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.collect.tuple;
import java.io.Serializable;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.function.BiFunction;
import java.util.function.BinaryOperator;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.ImmutableBean;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaBean;
import org.joda.beans.MetaProperty;
import org.joda.beans.gen.BeanDefinition;
import org.joda.beans.gen.PropertyDefinition;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import org.joda.beans.impl.direct.DirectPrivateBeanBuilder;
import com.google.common.collect.ComparisonChain;
import com.google.common.collect.ImmutableList;
/**
* An immutable pair consisting of two elements.
* <p>
* This implementation refers to the elements as 'first' and 'second'.
* The elements cannot be null.
* <p>
* Although the implementation is immutable, there is no restriction on the objects
* that may be stored. If mutable objects are stored in the pair, then the pair itself
* effectively becomes mutable.
* <p>
* Some primitive specializations of this class are provided, such as {@link DoublesPair}.
* <p>
* This class is immutable and thread-safe if the stored objects are immutable.
*
* @param <A> the first element type
* @param <B> the second element type
*/
@BeanDefinition(builderScope = "private")
public final class Pair<A, B>
    implements ImmutableBean, Tuple, Comparable<Pair<A, B>>, Serializable {
  /**
   * The first element in this pair.
   */
  @PropertyDefinition(validate = "notNull")
  private final A first;
  /**
   * The second element in this pair.
   */
  @PropertyDefinition(validate = "notNull")
  private final B second;
  //-------------------------------------------------------------------------
  /**
   * Obtains a pair inferring the types.
   *
   * @param <A> the first element type
   * @param <B> the second element type
   * @param first the first element
   * @param second the second element
   * @return a pair formed from the two parameters
   */
  public static <A, B> Pair<A, B> of(A first, B second) {
    return new Pair<>(first, second);
  }
  //-------------------------------------------------------------------------
  /**
   * Returns a combiner of pair instances.
   * <p>
   * This is useful if you have a stream of {@code Pair<A, B>} and would like to reduce.
   * <p>
   * e.g
   * <pre>{@code pairList.stream()
   *     .reduce(Pair.combining(A::combinedWith, B::combinedWith))
   * }</pre>
   *
   * @param <A> the type of the first values
   * @param <B> the type of the second values
   * @param combinerFirst the combiner of first values
   * @param combinerSecond the combiner of second values
   * @return the combiner of pair instance
   */
  public static <A, B> BinaryOperator<Pair<A, B>> combining(
      BiFunction<? super A, ? super A, ? extends A> combinerFirst,
      BiFunction<? super B, ? super B, ? extends B> combinerSecond) {
    return (pair1, pair2) -> pair1.combinedWith(pair2, combinerFirst, combinerSecond);
  }
  /**
   * Combines this instance with another.
   *
   * @param <C> the type of the first value in the other instance
   * @param <D> the type of the second value in the other instance
   * @param other the other pair
   * @param combinerFirst the combiner of first values
   * @param combinerSecond the combiner of second values
   * @return the combined pair instance
   */
  public <C, D> Pair<A, B> combinedWith(
      Pair<C, D> other,
      BiFunction<? super A, ? super C, ? extends A> combinerFirst,
      BiFunction<? super B, ? super D, ? extends B> combinerSecond) {
    return Pair.of(combinerFirst.apply(first, other.getFirst()), combinerSecond.apply(second, other.getSecond()));
  }
  //-------------------------------------------------------------------------
  /**
   * Gets the number of elements held by this pair.
   *
   * @return size 2
   */
  @Override
  public int size() {
    return 2;
  }
  /**
   * Gets the elements from this pair as a list.
   * <p>
   * The list returns each element in the pair in order.
   *
   * @return the elements as an immutable list
   */
  @Override
  public ImmutableList<Object> elements() {
    return ImmutableList.of(first, second);
  }
  //-------------------------------------------------------------------------
  /**
   * Compares the pair based on the first element followed by the second element.
   * <p>
   * The element types must be {@code Comparable}.
   *
   * @param other the other pair
   * @return negative if this is less, zero if equal, positive if greater
   * @throws ClassCastException if either object is not comparable
   */
  @Override
  public int compareTo(Pair<A, B> other) {
    return ComparisonChain.start()
        .compare((Comparable<?>) first, (Comparable<?>) other.first)
        .compare((Comparable<?>) second, (Comparable<?>) other.second)
        .result();
  }
  /**
   * Gets the pair using a standard string format.
   * <p>
   * The standard format is '[$first, $second]'. Spaces around the values are trimmed.
   *
   * @return the pair as a string
   */
  @Override
  public String toString() {
    return new StringBuilder()
        .append('[')
        .append(first)
        .append(", ")
        .append(second)
        .append(']')
        .toString();
  }
  //------------------------- AUTOGENERATED START -------------------------
  // NOTE: everything between the AUTOGENERATED markers is produced by the
  // Joda-Beans code generator; regenerate it rather than editing by hand.
  /**
   * The meta-bean for {@code Pair}.
   * @return the meta-bean, not null
   */
  @SuppressWarnings("rawtypes")
  public static Pair.Meta meta() {
    return Pair.Meta.INSTANCE;
  }
  /**
   * The meta-bean for {@code Pair}.
   * @param <R> the first generic type
   * @param <S> the second generic type
   * @param cls1 the first generic type
   * @param cls2 the second generic type
   * @return the meta-bean, not null
   */
  @SuppressWarnings("unchecked")
  public static <R, S> Pair.Meta<R, S> metaPair(Class<R> cls1, Class<S> cls2) {
    return Pair.Meta.INSTANCE;
  }
  // registers the meta-bean with the Joda-Beans registry on class load
  static {
    MetaBean.register(Pair.Meta.INSTANCE);
  }
  /**
   * The serialization version id.
   */
  private static final long serialVersionUID = 1L;
  private Pair(
      A first,
      B second) {
    JodaBeanUtils.notNull(first, "first");
    JodaBeanUtils.notNull(second, "second");
    this.first = first;
    this.second = second;
  }
  @SuppressWarnings("unchecked")
  @Override
  public Pair.Meta<A, B> metaBean() {
    return Pair.Meta.INSTANCE;
  }
  //-----------------------------------------------------------------------
  /**
   * Gets the first element in this pair.
   * @return the value of the property, not null
   */
  public A getFirst() {
    return first;
  }
  //-----------------------------------------------------------------------
  /**
   * Gets the second element in this pair.
   * @return the value of the property, not null
   */
  public B getSecond() {
    return second;
  }
  //-----------------------------------------------------------------------
  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj != null && obj.getClass() == this.getClass()) {
      Pair<?, ?> other = (Pair<?, ?>) obj;
      return JodaBeanUtils.equal(first, other.first) &&
          JodaBeanUtils.equal(second, other.second);
    }
    return false;
  }
  @Override
  public int hashCode() {
    int hash = getClass().hashCode();
    hash = hash * 31 + JodaBeanUtils.hashCode(first);
    hash = hash * 31 + JodaBeanUtils.hashCode(second);
    return hash;
  }
  //-----------------------------------------------------------------------
  /**
   * The meta-bean for {@code Pair}.
   * @param <A> the type
   * @param <B> the type
   */
  public static final class Meta<A, B> extends DirectMetaBean {
    /**
     * The singleton instance of the meta-bean.
     */
    @SuppressWarnings("rawtypes")
    static final Meta INSTANCE = new Meta();
    /**
     * The meta-property for the {@code first} property.
     */
    @SuppressWarnings({"unchecked", "rawtypes" })
    private final MetaProperty<A> first = (DirectMetaProperty) DirectMetaProperty.ofImmutable(
        this, "first", Pair.class, Object.class);
    /**
     * The meta-property for the {@code second} property.
     */
    @SuppressWarnings({"unchecked", "rawtypes" })
    private final MetaProperty<B> second = (DirectMetaProperty) DirectMetaProperty.ofImmutable(
        this, "second", Pair.class, Object.class);
    /**
     * The meta-properties.
     */
    private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap(
        this, null,
        "first",
        "second");
    /**
     * Restricted constructor.
     */
    private Meta() {
    }
    @Override
    protected MetaProperty<?> metaPropertyGet(String propertyName) {
      switch (propertyName.hashCode()) {
        case 97440432: // first
          return first;
        case -906279820: // second
          return second;
      }
      return super.metaPropertyGet(propertyName);
    }
    @Override
    public BeanBuilder<? extends Pair<A, B>> builder() {
      return new Pair.Builder<>();
    }
    @SuppressWarnings({"unchecked", "rawtypes" })
    @Override
    public Class<? extends Pair<A, B>> beanType() {
      return (Class) Pair.class;
    }
    @Override
    public Map<String, MetaProperty<?>> metaPropertyMap() {
      return metaPropertyMap$;
    }
    //-----------------------------------------------------------------------
    /**
     * The meta-property for the {@code first} property.
     * @return the meta-property, not null
     */
    public MetaProperty<A> first() {
      return first;
    }
    /**
     * The meta-property for the {@code second} property.
     * @return the meta-property, not null
     */
    public MetaProperty<B> second() {
      return second;
    }
    //-----------------------------------------------------------------------
    @Override
    protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
      switch (propertyName.hashCode()) {
        case 97440432: // first
          return ((Pair<?, ?>) bean).getFirst();
        case -906279820: // second
          return ((Pair<?, ?>) bean).getSecond();
      }
      return super.propertyGet(bean, propertyName, quiet);
    }
    @Override
    protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
      metaProperty(propertyName);
      if (quiet) {
        return;
      }
      throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
    }
  }
  //-----------------------------------------------------------------------
  /**
   * The bean-builder for {@code Pair}.
   * @param <A> the type
   * @param <B> the type
   */
  private static final class Builder<A, B> extends DirectPrivateBeanBuilder<Pair<A, B>> {
    private A first;
    private B second;
    /**
     * Restricted constructor.
     */
    private Builder() {
    }
    //-----------------------------------------------------------------------
    @Override
    public Object get(String propertyName) {
      switch (propertyName.hashCode()) {
        case 97440432: // first
          return first;
        case -906279820: // second
          return second;
        default:
          throw new NoSuchElementException("Unknown property: " + propertyName);
      }
    }
    @SuppressWarnings("unchecked")
    @Override
    public Builder<A, B> set(String propertyName, Object newValue) {
      switch (propertyName.hashCode()) {
        case 97440432: // first
          this.first = (A) newValue;
          break;
        case -906279820: // second
          this.second = (B) newValue;
          break;
        default:
          throw new NoSuchElementException("Unknown property: " + propertyName);
      }
      return this;
    }
    @Override
    public Pair<A, B> build() {
      return new Pair<>(
          first,
          second);
    }
    //-----------------------------------------------------------------------
    @Override
    public String toString() {
      StringBuilder buf = new StringBuilder(96);
      buf.append("Pair.Builder{");
      buf.append("first").append('=').append(JodaBeanUtils.toString(first)).append(',').append(' ');
      buf.append("second").append('=').append(JodaBeanUtils.toString(second));
      buf.append('}');
      return buf.toString();
    }
  }
  //-------------------------- AUTOGENERATED END --------------------------
}
| |
/*
* Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.pregelix.dataflow.std;
import java.io.DataOutput;
import java.nio.ByteBuffer;
import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDataflowHelper;
import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingFrameTupleReference;
/**
 * Pushable operator that performs an index nested-loop RIGHT OUTER join: probe
 * tuples arrive frame by frame via {@link #nextFrame(ByteBuffer)} and are merged
 * against a full forward scan of the tree index. Index tuples that never match a
 * probe tuple are still emitted, padded with nulls on the probe side.
 */
public class IndexNestedLoopRightOuterJoinOperatorNodePushable extends
        AbstractUnaryInputUnaryOutputOperatorNodePushable {
    private IndexDataflowHelper treeIndexOpHelper;
    private FrameTupleAccessor accessor;
    private ByteBuffer writeBuffer;
    private FrameTupleAppender appender;
    private ArrayTupleBuilder tb;
    private DataOutput dos;
    private ITreeIndex index;
    private boolean isForward;
    private RangePredicate rangePred;
    private MultiComparator lowKeySearchCmp;
    private MultiComparator highKeySearchCmp;
    private IIndexCursor cursor;
    protected IIndexAccessor indexAccessor;
    // NOTE(review): recDesc and inputRecDesc are both initialized from the same
    // getInputRecordDescriptor call; they appear to be duplicates.
    private RecordDescriptor recDesc;
    private final RecordDescriptor inputRecDesc;
    private PermutingFrameTupleReference lowKey;
    private PermutingFrameTupleReference highKey;
    private INullWriter[] nullWriter;
    /** The index tuple currently under the cursor; null once the scan is exhausted. */
    private ITupleReference currentTopTuple;
    /** True once currentTopTuple has been joined with at least one probe tuple. */
    private boolean match;

    public IndexNestedLoopRightOuterJoinOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
            IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward,
            int[] lowKeyFields, int[] highKeyFields, INullWriter[] nullWriter) {
        inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
        treeIndexOpHelper = (IndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
                opDesc, ctx, partition);
        this.isForward = isForward;
        this.recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
        if (lowKeyFields != null && lowKeyFields.length > 0) {
            lowKey = new PermutingFrameTupleReference();
            lowKey.setFieldPermutation(lowKeyFields);
        }
        if (highKeyFields != null && highKeyFields.length > 0) {
            highKey = new PermutingFrameTupleReference();
            highKey.setFieldPermutation(highKeyFields);
        }
        this.nullWriter = nullWriter;
    }

    /** Creates the search cursor; overridable by subclasses that need a different cursor. */
    protected void setCursor() {
        cursor = indexAccessor.createSearchCursor(false);
    }

    /**
     * Opens the index, builds the search comparators/predicate, starts a full
     * scan, and positions the cursor on the first index tuple.
     */
    @Override
    public void open() throws HyracksDataException {
        accessor = new FrameTupleAccessor(treeIndexOpHelper.getTaskContext().getFrameSize(), recDesc);
        try {
            treeIndexOpHelper.open();
            index = (ITreeIndex) treeIndexOpHelper.getIndexInstance();
            writer.open();
            // construct range predicate
            // TODO: Can we construct the multicmps using helper methods?
            int lowKeySearchFields = index.getComparatorFactories().length;
            int highKeySearchFields = index.getComparatorFactories().length;
            IBinaryComparator[] lowKeySearchComparators = new IBinaryComparator[lowKeySearchFields];
            for (int i = 0; i < lowKeySearchFields; i++) {
                lowKeySearchComparators[i] = index.getComparatorFactories()[i].createBinaryComparator();
            }
            lowKeySearchCmp = new MultiComparator(lowKeySearchComparators);
            if (lowKeySearchFields == highKeySearchFields) {
                // same field count: the low-key comparator can be reused
                highKeySearchCmp = lowKeySearchCmp;
            } else {
                IBinaryComparator[] highKeySearchComparators = new IBinaryComparator[highKeySearchFields];
                for (int i = 0; i < highKeySearchFields; i++) {
                    highKeySearchComparators[i] = index.getComparatorFactories()[i].createBinaryComparator();
                }
                highKeySearchCmp = new MultiComparator(highKeySearchComparators);
            }
            rangePred = new RangePredicate(null, null, true, true, lowKeySearchCmp, highKeySearchCmp);
            writeBuffer = treeIndexOpHelper.getTaskContext().allocateFrame();
            // output tuple = all probe fields followed by all index fields
            tb = new ArrayTupleBuilder(inputRecDesc.getFields().length + index.getFieldCount());
            dos = tb.getDataOutput();
            appender = new FrameTupleAppender(treeIndexOpHelper.getTaskContext().getFrameSize());
            appender.reset(writeBuffer, true);
            indexAccessor = index.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
            setCursor();
            /** set the search cursor to a full, inclusive scan */
            rangePred.setLowKey(null, true);
            rangePred.setHighKey(null, true);
            cursor.reset();
            indexAccessor.search(cursor, rangePred);
            /** set up current top tuple */
            if (cursor.hasNext()) {
                cursor.next();
                currentTopTuple = cursor.getTuple();
                match = false;
            }
        } catch (Exception e) {
            treeIndexOpHelper.close();
            throw new HyracksDataException(e);
        }
    }

    /**
     * Appends one joined output tuple: the probe tuple at tIndex followed by the
     * index tuple, flushing the output frame when it fills up.
     */
    private void writeResults(IFrameTupleAccessor leftAccessor, int tIndex, ITupleReference frameTuple)
            throws Exception {
        tb.reset();
        for (int i = 0; i < inputRecDesc.getFields().length; i++) {
            int tupleStart = leftAccessor.getTupleStartOffset(tIndex);
            int fieldStart = leftAccessor.getFieldStartOffset(tIndex, i);
            int offset = leftAccessor.getFieldSlotsLength() + tupleStart + fieldStart;
            int len = leftAccessor.getFieldEndOffset(tIndex, i) - fieldStart;
            dos.write(leftAccessor.getBuffer().array(), offset, len);
            tb.addFieldEndOffset();
        }
        for (int i = 0; i < frameTuple.getFieldCount(); i++) {
            dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
            tb.addFieldEndOffset();
        }
        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
            FrameUtils.flushFrame(writeBuffer, writer);
            appender.reset(writeBuffer, true);
            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
                // a single tuple larger than a frame cannot be emitted
                throw new IllegalStateException();
            }
        }
    }

    /**
     * Merge-joins the probe tuples in the frame against the index scan: advance
     * the probe side while it sorts at-or-before the cursor tuple (emitting on
     * equality), otherwise advance the index cursor.
     */
    @Override
    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
        accessor.reset(buffer);
        int tupleCount = accessor.getTupleCount();
        try {
            for (int i = 0; i < tupleCount && currentTopTuple != null;) {
                if (lowKey != null)
                    lowKey.reset(accessor, i);
                if (highKey != null)
                    highKey.reset(accessor, i);
                // TODO: currently use low key only, check what they mean
                int cmp = compare(lowKey, currentTopTuple);
                if ((cmp <= 0 && isForward) || (cmp >= 0 && !isForward)) {
                    if (cmp == 0)
                        outputMatch(i);
                    i++;
                } else {
                    moveTreeCursor();
                }
            }
        } catch (Exception e) {
            throw new HyracksDataException(e);
        }
    }

    /** Emits the joined pair and marks the current index tuple as matched. */
    private void outputMatch(int i) throws Exception {
        writeResults(accessor, i, currentTopTuple);
        match = true;
    }

    /**
     * Advances the index cursor, first emitting a null-padded row if the tuple
     * being left behind never matched (the "right outer" part).
     */
    private void moveTreeCursor() throws Exception {
        if (!match) {
            writeResults(currentTopTuple);
        }
        if (cursor.hasNext()) {
            cursor.next();
            currentTopTuple = cursor.getTuple();
            match = false;
        } else {
            currentTopTuple = null;
        }
    }

    /**
     * Drains the remaining index tuples (null-padded), flushes any buffered
     * output, and releases resources. Guards against open() having failed
     * before the cursor/appender were created.
     */
    @Override
    public void close() throws HyracksDataException {
        try {
            while (currentTopTuple != null) {
                moveTreeCursor();
            }
            if (appender != null && appender.getTupleCount() > 0) {
                FrameUtils.flushFrame(writeBuffer, writer);
            }
            writer.close();
            try {
                if (cursor != null) {
                    cursor.close();
                }
            } catch (Exception e) {
                throw new HyracksDataException(e);
            }
        } catch (Exception e) {
            throw new HyracksDataException(e);
        } finally {
            treeIndexOpHelper.close();
        }
    }

    /**
     * Failure path: closes the cursor (if it was ever created -- open() may have
     * failed before setCursor(), in which case the original NPE would mask the
     * real failure) and propagates the failure downstream.
     */
    @Override
    public void fail() throws HyracksDataException {
        try {
            if (cursor != null) {
                cursor.close();
            }
        } catch (Exception e) {
            throw new HyracksDataException(e);
        } finally {
            treeIndexOpHelper.close();
        }
        writer.fail();
    }

    /** Compares a probe key tuple against an index tuple using the low-key comparator. */
    private int compare(ITupleReference left, ITupleReference right) throws Exception {
        return lowKeySearchCmp.compare(left, right);
    }

    /**
     * Emits an unmatched index tuple for the outer case: probe-side fields are
     * written as nulls, followed by the index tuple's fields.
     */
    private void writeResults(ITupleReference frameTuple) throws Exception {
        tb.reset();
        for (int i = 0; i < inputRecDesc.getFields().length; i++) {
            nullWriter[i].writeNull(dos);
            tb.addFieldEndOffset();
        }
        for (int i = 0; i < frameTuple.getFieldCount(); i++) {
            dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
            tb.addFieldEndOffset();
        }
        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
            FrameUtils.flushFrame(writeBuffer, writer);
            appender.reset(writeBuffer, true);
            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
                throw new IllegalStateException();
            }
        }
    }
}
| |
/*
* Copyright 2014 - 2015 Real Logic Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.co.real_logic.aeron;
import uk.co.real_logic.aeron.logbuffer.BufferClaim;
import uk.co.real_logic.aeron.logbuffer.LogBufferDescriptor;
import uk.co.real_logic.aeron.logbuffer.TermAppender;
import uk.co.real_logic.agrona.DirectBuffer;
import uk.co.real_logic.agrona.concurrent.UnsafeBuffer;
import uk.co.real_logic.agrona.concurrent.status.ReadablePosition;
import static uk.co.real_logic.aeron.logbuffer.LogBufferDescriptor.*;
/**
* Aeron Publisher API for sending messages to subscribers of a given channel and streamId pair. Publishers
* are created via an {@link Aeron} object, and messages are sent via an offer method or a claim and commit
* method combination.
* <p>
* The APIs used to send are all non-blocking.
* <p>
* Note: Publication instances are threadsafe and can be shared between publisher threads.
* @see Aeron#addPublication(String, int)
*/
public class Publication implements AutoCloseable
{
    /**
     * The publication is not yet connected to a subscriber.
     */
    public static final long NOT_CONNECTED = -1;
    /**
     * The offer failed due to back pressure preventing further transmission.
     */
    public static final long BACK_PRESSURED = -2;
    // immutable identity of this publication
    private final long registrationId;
    private final int streamId;
    private final int sessionId;
    private final String channel;
    // collaborators supplied by the Aeron client
    private final ClientConductor clientConductor;
    private final LogBuffers logBuffers;
    // one appender per log partition
    private final TermAppender[] termAppenders = new TermAppender[PARTITION_COUNT];
    // subscriber-driven flow-control limit, read volatile on each offer
    private final ReadablePosition publicationLimit;
    private final UnsafeBuffer logMetaDataBuffer;
    private final int positionBitsToShift;
    // reference count; mutated only while synchronized on clientConductor
    private int refCount = 0;
    private volatile boolean isClosed = false;
Publication(
final ClientConductor clientConductor,
final String channel,
final int streamId,
final int sessionId,
final ReadablePosition publicationLimit,
final LogBuffers logBuffers,
final long registrationId)
{
final UnsafeBuffer[] buffers = logBuffers.atomicBuffers();
final UnsafeBuffer logMetaDataBuffer = buffers[LOG_META_DATA_SECTION_INDEX];
final UnsafeBuffer[] defaultFrameHeaders = defaultFrameHeaders(logMetaDataBuffer);
final int mtuLength = mtuLength(logMetaDataBuffer);
for (int i = 0; i < PARTITION_COUNT; i++)
{
termAppenders[i] = new TermAppender(buffers[i], buffers[i + PARTITION_COUNT], defaultFrameHeaders[i], mtuLength);
}
this.clientConductor = clientConductor;
this.channel = channel;
this.streamId = streamId;
this.sessionId = sessionId;
this.logBuffers = logBuffers;
this.logMetaDataBuffer = logMetaDataBuffer;
this.registrationId = registrationId;
this.publicationLimit = publicationLimit;
this.positionBitsToShift = Integer.numberOfTrailingZeros(buffers[0].capacity());
}
/**
* Media address for delivery to the channel.
*
* @return Media address for delivery to the channel.
*/
public String channel()
{
return channel;
}
/**
* Stream identity for scoping within the channel media address.
*
* @return Stream identity for scoping within the channel media address.
*/
public int streamId()
{
return streamId;
}
/**
* Session under which messages are published. Identifies this Publication instance.
*
* @return the session id for this publication.
*/
public int sessionId()
{
return sessionId;
}
/**
* Maximum message length supported in bytes.
*
* @return maximum message length supported in bytes.
*/
public int maxMessageLength()
{
return termAppenders[0].maxMessageLength();
}
/**
* Release resources used by this Publication when there are no more references.
*
* Publications are reference counted and are only truly closed when the ref count reaches zero.
*/
public void close()
{
synchronized (clientConductor)
{
if (--refCount == 0)
{
release();
}
}
}
/**
* Release resources and forcibly close the Publication regardless of reference count.
*/
void release()
{
if (!isClosed)
{
isClosed = true;
clientConductor.releasePublication(this);
logBuffers.close();
}
}
/**
* Get the current position to which the publication has advanced for this stream.
*
* @return the current position to which the publication has advanced for this stream.
* @throws IllegalStateException if the publication is closed.
*/
public long position()
{
ensureOpen();
final int initialTermId = initialTermId(logMetaDataBuffer);
final int activeTermId = activeTermId(logMetaDataBuffer);
final int currentTail = termAppenders[indexByTerm(initialTermId, activeTermId)].tailVolatile();
return computePosition(activeTermId, currentTail, positionBitsToShift, initialTermId);
}
/**
* Non-blocking publish of a buffer containing a message.
*
* @param buffer containing message.
* @return The new stream position on success, otherwise {@link #BACK_PRESSURED} or {@link #NOT_CONNECTED}.
*/
public long offer(final DirectBuffer buffer)
{
return offer(buffer, 0, buffer.capacity());
}
    /**
     * Non-blocking publish of a partial buffer containing a message.
     *
     * @param buffer containing message.
     * @param offset offset in the buffer at which the encoded message begins.
     * @param length in bytes of the encoded message.
     * @return The new stream position on success, otherwise {@link #BACK_PRESSURED} or {@link #NOT_CONNECTED}.
     * @throws IllegalStateException if the publication is closed.
     */
    public long offer(final DirectBuffer buffer, final int offset, final int length)
    {
        ensureOpen();
        // Flow-control limit; read with volatile semantics before sampling the tail.
        final long limit = publicationLimit.getVolatile();
        final int initialTermId = initialTermId(logMetaDataBuffer);
        final int activeTermId = activeTermId(logMetaDataBuffer);
        final int activeIndex = indexByTerm(initialTermId, activeTermId);
        final TermAppender termAppender = termAppenders[activeIndex];
        final int currentTail = termAppender.rawTailVolatile();
        final long position = computePosition(activeTermId, currentTail, positionBitsToShift, initialTermId);
        long newPosition = BACK_PRESSURED;
        if (position < limit)
        {
            // Append into the active term; newPosition() handles the case where the
            // term tripped (filled) and a rotation to the next partition is required.
            final int nextOffset = termAppender.append(buffer, offset, length);
            newPosition = newPosition(activeTermId, activeIndex, currentTail, position, nextOffset);
        }
        else if (0 == limit)
        {
            // A zero limit is reported as NOT_CONNECTED rather than back pressure.
            newPosition = NOT_CONNECTED;
        }
        return newPosition;
    }
    /**
     * Try to claim a range in the publication log into which a message can be written with zero copy semantics.
     * Once the message has been written then {@link BufferClaim#commit()} should be called thus making it available.
     * <p>
     * <b>Note:</b> This method can only be used for message lengths less than MTU length minus header.
     *
     * <pre>{@code
     * final BufferClaim bufferClaim = new BufferClaim(); // Can be stored and reused to avoid allocation
     *
     * if (publication.tryClaim(messageLength, bufferClaim) > 0L)
     * {
     *     try
     *     {
     *          final MutableDirectBuffer buffer = bufferClaim.buffer();
     *          final int offset = bufferClaim.offset();
     *
     *          // Work with buffer directly or wrap with a flyweight
     *     }
     *     finally
     *     {
     *         bufferClaim.commit();
     *     }
     * }
     * }</pre>
     *
     * @param length of the range to claim, in bytes.
     * @param bufferClaim to be populated if the claim succeeds.
     * @return The new stream position on success, otherwise {@link #BACK_PRESSURED} or {@link #NOT_CONNECTED}.
     * @throws IllegalArgumentException if the length is greater than max payload length within an MTU.
     * @throws IllegalStateException if the publication is closed.
     * @see BufferClaim#commit()
     */
    public long tryClaim(final int length, final BufferClaim bufferClaim)
    {
        ensureOpen();
        // Same limit/tail sampling sequence as offer(); see comments there.
        final long limit = publicationLimit.getVolatile();
        final int initialTermId = initialTermId(logMetaDataBuffer);
        final int activeTermId = activeTermId(logMetaDataBuffer);
        final int activeIndex = indexByTerm(initialTermId, activeTermId);
        final TermAppender termAppender = termAppenders[activeIndex];
        final int currentTail = termAppender.rawTailVolatile();
        final long position = computePosition(activeTermId, currentTail, positionBitsToShift, initialTermId);
        long newPosition = BACK_PRESSURED;
        if (position < limit)
        {
            // Reserve space in the term; the caller writes into bufferClaim and commits.
            final int nextOffset = termAppender.claim(length, bufferClaim);
            newPosition = newPosition(activeTermId, activeIndex, currentTail, position, nextOffset);
        }
        else if (0 == limit)
        {
            newPosition = NOT_CONNECTED;
        }
        return newPosition;
    }
    /**
     * The registration id identifying this publication instance.
     * (Presumably assigned when the publication was registered with the driver —
     * confirm against the client conductor protocol.)
     */
    long registrationId()
    {
        return registrationId;
    }
    /**
     * Increment the reference count for this publication.
     * Guarded by the same monitor as {@link #close()} so counting is race free.
     *
     * @see Publication#close()
     */
    void incRef()
    {
        synchronized (clientConductor)
        {
            ++refCount;
        }
    }
    /**
     * Compute the new stream position after an append/claim attempt, rotating to the
     * next term partition when the current term has been tripped (filled).
     *
     * @return the new absolute position, or {@link #BACK_PRESSURED} when no progress was made.
     */
    private long newPosition(
        final int activeTermId, final int activeIndex, final int currentTail, final long position, final int nextOffset)
    {
        long newPosition = BACK_PRESSURED;
        if (nextOffset > 0)
        {
            // Successful append: advance by the delta between the new and sampled tail offsets.
            newPosition = (position - currentTail) + nextOffset;
        }
        else if (nextOffset == TermAppender.TRIPPED)
        {
            // Term is full: stamp default headers for the next two partitions, mark the
            // one after next for cleaning, then advance the active term id.
            final int newTermId = activeTermId + 1;
            final int nextIndex = nextPartitionIndex(activeIndex);
            final int nextNextIndex = nextPartitionIndex(nextIndex);
            LogBufferDescriptor.defaultHeaderTermId(logMetaDataBuffer, nextIndex, newTermId);
            // Need to advance the term id in case a publication takes an interrupt
            // between reading the active term and incrementing the tail.
            // This covers the case of an interrupt taking longer than
            // the time taken to complete the current term.
            LogBufferDescriptor.defaultHeaderTermId(logMetaDataBuffer, nextNextIndex, newTermId + 1);
            termAppenders[nextNextIndex].statusOrdered(NEEDS_CLEANING);
            LogBufferDescriptor.activeTermId(logMetaDataBuffer, newTermId);
        }
        return newPosition;
    }
    /**
     * Fail fast with {@link IllegalStateException} if this publication has been closed.
     */
    private void ensureOpen()
    {
        if (isClosed)
        {
            throw new IllegalStateException(String.format(
                "Publication is closed: channel=%s streamId=%d sessionId=%d registrationId=%d",
                channel, streamId, sessionId, registrationId));
        }
    }
}
| |
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.objc;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.rules.objc.CompilationSupport.ExtraLinkArgs;
import com.google.devtools.build.lib.testutil.Scratch;
import java.io.IOException;
import java.util.Set;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Test case for apple_dynamic_library. */
@RunWith(JUnit4.class)
public class AppleDynamicLibraryTest extends ObjcRuleTestCase {
static final RuleType RULE_TYPE = new RuleType("apple_binary") {
@Override
Iterable<String> requiredAttributes(Scratch scratch, String packageDir,
Set<String> alreadyAdded) throws IOException {
return Iterables.concat(ImmutableList.of("binary_type = 'dylib'"),
AppleBinaryTest.RULE_TYPE.requiredAttributes(scratch, packageDir, alreadyAdded));
}
};
@Test
public void testCcDependencyLinkoptsArePropagatedToLinkAction() throws Exception {
checkCcDependencyLinkoptsArePropagatedToLinkAction(RULE_TYPE);
}
@Test
public void testUnknownPlatformType() throws Exception {
checkError(
"package",
"test",
String.format(MultiArchSplitTransitionProvider.UNSUPPORTED_PLATFORM_TYPE_ERROR_FORMAT,
"meow_meow_os"),
"apple_binary(name = 'test', binary_type = 'dylib', srcs = [ 'a.m' ], "
+ "platform_type = 'meow_meow_os')");
}
@Test
public void testProtoBundlingAndLinking() throws Exception {
checkProtoBundlingAndLinking(RULE_TYPE);
}
@Test
public void testProtoBundlingWithTargetsWithNoDeps() throws Exception {
checkProtoBundlingWithTargetsWithNoDeps(RULE_TYPE);
}
@Test
public void testCanUseCrosstool_singleArch() throws Exception {
checkLinkingRuleCanUseCrosstool_singleArch(RULE_TYPE);
}
@Test
public void testCanUseCrosstool_multiArch() throws Exception {
checkLinkingRuleCanUseCrosstool_multiArch(RULE_TYPE);
}
@Test
public void testAppleSdkIphoneosPlatformEnv() throws Exception {
checkAppleSdkIphoneosPlatformEnv(RULE_TYPE);
}
@Test
public void testXcodeVersionEnv() throws Exception {
checkXcodeVersionEnv(RULE_TYPE);
}
@Test
public void testAliasedLinkoptsThroughObjcLibrary() throws Exception {
checkAliasedLinkoptsThroughObjcLibrary(RULE_TYPE);
}
@Test
public void testObjcProviderLinkInputsInLinkAction() throws Exception {
checkObjcProviderLinkInputsInLinkAction(RULE_TYPE);
}
@Test
public void testAppleSdkVersionEnv() throws Exception {
checkAppleSdkVersionEnv(RULE_TYPE);
}
@Test
public void testNonDefaultAppleSdkVersionEnv() throws Exception {
checkNonDefaultAppleSdkVersionEnv(RULE_TYPE);
}
@Test
public void testAppleSdkDefaultPlatformEnv() throws Exception {
checkAppleSdkDefaultPlatformEnv(RULE_TYPE);
}
@Test
public void testAvoidDepsObjects_avoidViaCcLibrary() throws Exception {
checkAvoidDepsObjects_avoidViaCcLibrary(RULE_TYPE);
}
@Test
public void testNoSrcs() throws Exception {
checkNoSrcs(RULE_TYPE);
}
@Test
public void testLipoBinaryAction() throws Exception {
checkLipoBinaryAction(RULE_TYPE);
}
@Test
public void testWatchSimulatorDepCompile() throws Exception {
checkWatchSimulatorDepCompile(RULE_TYPE);
}
@Test
public void testMultiarchCcDep() throws Exception {
checkMultiarchCcDep(RULE_TYPE);
}
@Test
public void testWatchSimulatorLipoAction() throws Exception {
checkWatchSimulatorLipoAction(RULE_TYPE);
}
@Test
public void testLinkActionsWithSrcs() throws Exception {
checkLinkActionsWithSrcs(RULE_TYPE,
new ExtraLinkArgs("-dynamiclib"));
}
@Test
public void testFrameworkDepLinkFlags() throws Exception {
checkFrameworkDepLinkFlags(RULE_TYPE, new ExtraLinkArgs("-dynamiclib"));
}
@Test
public void testDylibDependencies() throws Exception {
checkDylibDependencies(RULE_TYPE, new ExtraLinkArgs("-dynamiclib"));
}
@Test
public void testMinimumOs() throws Exception {
checkMinimumOsLinkAndCompileArg(RULE_TYPE);
}
@Test
public void testMinimumOs_watchos() throws Exception {
checkMinimumOsLinkAndCompileArg_watchos(RULE_TYPE);
}
@Test
public void testMinimumOs_invalid() throws Exception {
checkMinimumOs_invalid_nonVersion(RULE_TYPE);
}
@Test
public void testAppleSdkWatchsimulatorPlatformEnv() throws Exception {
checkAppleSdkWatchsimulatorPlatformEnv(RULE_TYPE);
}
@Test
public void testAppleSdkWatchosPlatformEnv() throws Exception {
checkAppleSdkWatchosPlatformEnv(RULE_TYPE);
}
@Test
public void testAppleSdkTvsimulatorPlatformEnv() throws Exception {
checkAppleSdkTvsimulatorPlatformEnv(RULE_TYPE);
}
@Test
public void testAppleSdkTvosPlatformEnv() throws Exception {
checkAppleSdkTvosPlatformEnv(RULE_TYPE);
}
@Test
public void testWatchSimulatorLinkAction() throws Exception {
checkWatchSimulatorLinkAction(RULE_TYPE);
}
@Test
public void testAvoidDepsObjects() throws Exception {
checkAvoidDepsObjects(RULE_TYPE);
}
@Test
public void testMinimumOsDifferentTargets() throws Exception {
checkMinimumOsDifferentTargets(RULE_TYPE, "_lipobin", "_bin");
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.kstream.internals.suppress;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.SystemTime;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Suppressed;
import org.apache.kafka.streams.kstream.internals.Change;
import org.apache.kafka.streams.kstream.internals.KTableImpl;
import org.apache.kafka.streams.processor.Processor;
import org.apache.kafka.streams.processor.StateStore;
import org.apache.kafka.streams.processor.TaskId;
import org.apache.kafka.streams.processor.internals.ProcessorNode;
import org.apache.kafka.streams.state.internals.InMemoryTimeOrderedKeyValueBuffer;
import org.apache.kafka.test.MockInternalProcessorContext;
import org.apache.kafka.test.StreamsTestUtils;
import org.apache.kafka.test.TestUtils;
import org.easymock.EasyMock;
import org.hamcrest.Matcher;
import org.junit.Test;
import java.time.Duration;
import java.util.Map;
import java.util.Properties;
import static org.apache.kafka.common.utils.Utils.mkEntry;
import static org.apache.kafka.common.utils.Utils.mkMap;
import static org.apache.kafka.streams.kstream.Suppressed.BufferConfig.maxRecords;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.core.Is.is;
public class KTableSuppressProcessorMetricsTest {
    private static final long ARBITRARY_LONG = 5L;
    private static final TaskId TASK_ID = new TaskId(0, 0);
    private Properties streamsConfig = StreamsTestUtils.getStreamsConfig();
    private final String threadId = Thread.currentThread().getName();
    // The metric names below come in pairs: the "*0100To24" variants use the legacy
    // tag names/groups (e.g. "client-id", "stream-buffer-metrics") from metrics
    // versions 0.10.0 through 2.4, while the "*Latest" variants use the current tag
    // names/groups (e.g. "thread-id", "stream-state-metrics").
    private final MetricName evictionTotalMetric0100To24 = new MetricName(
        "suppression-emit-total",
        "stream-processor-node-metrics",
        "The total number of emitted records from the suppression buffer",
        mkMap(
            mkEntry("client-id", threadId),
            mkEntry("task-id", TASK_ID.toString()),
            mkEntry("processor-node-id", "testNode")
        )
    );
    private final MetricName evictionTotalMetricLatest = new MetricName(
        "suppression-emit-total",
        "stream-processor-node-metrics",
        "The total number of emitted records from the suppression buffer",
        mkMap(
            mkEntry("thread-id", threadId),
            mkEntry("task-id", TASK_ID.toString()),
            mkEntry("processor-node-id", "testNode")
        )
    );
    private final MetricName evictionRateMetric0100To24 = new MetricName(
        "suppression-emit-rate",
        "stream-processor-node-metrics",
        "The average number of emitted records from the suppression buffer per second",
        mkMap(
            mkEntry("client-id", threadId),
            mkEntry("task-id", TASK_ID.toString()),
            mkEntry("processor-node-id", "testNode")
        )
    );
    private final MetricName evictionRateMetricLatest = new MetricName(
        "suppression-emit-rate",
        "stream-processor-node-metrics",
        "The average number of emitted records from the suppression buffer per second",
        mkMap(
            mkEntry("thread-id", threadId),
            mkEntry("task-id", TASK_ID.toString()),
            mkEntry("processor-node-id", "testNode")
        )
    );
    private final MetricName bufferSizeAvgMetric0100To24 = new MetricName(
        "suppression-buffer-size-avg",
        "stream-buffer-metrics",
        "The average size of buffered records",
        mkMap(
            mkEntry("client-id", threadId),
            mkEntry("task-id", TASK_ID.toString()),
            mkEntry("buffer-id", "test-store")
        )
    );
    private final MetricName bufferSizeAvgMetricLatest = new MetricName(
        "suppression-buffer-size-avg",
        "stream-state-metrics",
        "The average size of buffered records",
        mkMap(
            mkEntry("thread-id", threadId),
            mkEntry("task-id", TASK_ID.toString()),
            mkEntry("in-memory-suppression-state-id", "test-store")
        )
    );
    // The "current" size/count metrics exist only in the legacy metrics version;
    // they have no "Latest" counterpart and are asserted only for 0100To24 below.
    private final MetricName bufferSizeCurrentMetric = new MetricName(
        "suppression-buffer-size-current",
        "stream-buffer-metrics",
        "The current size of buffered records",
        mkMap(
            mkEntry("client-id", threadId),
            mkEntry("task-id", TASK_ID.toString()),
            mkEntry("buffer-id", "test-store")
        )
    );
    private final MetricName bufferSizeMaxMetric0100To24 = new MetricName(
        "suppression-buffer-size-max",
        "stream-buffer-metrics",
        "The maximum size of buffered records",
        mkMap(
            mkEntry("client-id", threadId),
            mkEntry("task-id", TASK_ID.toString()),
            mkEntry("buffer-id", "test-store")
        )
    );
    private final MetricName bufferSizeMaxMetricLatest = new MetricName(
        "suppression-buffer-size-max",
        "stream-state-metrics",
        "The maximum size of buffered records",
        mkMap(
            mkEntry("thread-id", threadId),
            mkEntry("task-id", TASK_ID.toString()),
            mkEntry("in-memory-suppression-state-id", "test-store")
        )
    );
    private final MetricName bufferCountAvgMetric0100To24 = new MetricName(
        "suppression-buffer-count-avg",
        "stream-buffer-metrics",
        "The average count of buffered records",
        mkMap(
            mkEntry("client-id", threadId),
            mkEntry("task-id", TASK_ID.toString()),
            mkEntry("buffer-id", "test-store")
        )
    );
    private final MetricName bufferCountAvgMetricLatest = new MetricName(
        "suppression-buffer-count-avg",
        "stream-state-metrics",
        "The average count of buffered records",
        mkMap(
            mkEntry("thread-id", threadId),
            mkEntry("task-id", TASK_ID.toString()),
            mkEntry("in-memory-suppression-state-id", "test-store")
        )
    );
    private final MetricName bufferCountCurrentMetric = new MetricName(
        "suppression-buffer-count-current",
        "stream-buffer-metrics",
        "The current count of buffered records",
        mkMap(
            mkEntry("client-id", threadId),
            mkEntry("task-id", TASK_ID.toString()),
            mkEntry("buffer-id", "test-store")
        )
    );
    private final MetricName bufferCountMaxMetric0100To24 = new MetricName(
        "suppression-buffer-count-max",
        "stream-buffer-metrics",
        "The maximum count of buffered records",
        mkMap(
            mkEntry("client-id", threadId),
            mkEntry("task-id", TASK_ID.toString()),
            mkEntry("buffer-id", "test-store")
        )
    );
    private final MetricName bufferCountMaxMetricLatest = new MetricName(
        "suppression-buffer-count-max",
        "stream-state-metrics",
        "The maximum count of buffered records",
        mkMap(
            mkEntry("thread-id", threadId),
            mkEntry("task-id", TASK_ID.toString()),
            mkEntry("in-memory-suppression-state-id", "test-store")
        )
    );
    @Test
    public void shouldRecordMetricsWithBuiltInMetricsVersionLatest() {
        shouldRecordMetrics(StreamsConfig.METRICS_LATEST);
    }
    @Test
    public void shouldRecordMetricsWithBuiltInMetricsVersion0100To24() {
        shouldRecordMetrics(StreamsConfig.METRICS_0100_TO_24);
    }
    // Drives a suppress processor (time limit 100 days, buffer capped at 1 record)
    // through two records so that the second insert evicts the first, then asserts
    // the eviction and buffer size/count metrics for the given metrics version.
    private void shouldRecordMetrics(final String builtInMetricsVersion) {
        final String storeName = "test-store";
        final StateStore buffer = new InMemoryTimeOrderedKeyValueBuffer.Builder<>(
            storeName, Serdes.String(),
            Serdes.Long()
        )
            .withLoggingDisabled()
            .build();
        final KTableImpl<String, ?, Long> mock = EasyMock.mock(KTableImpl.class);
        final Processor<String, Change<Long>> processor =
            new KTableSuppressProcessorSupplier<>(
                (SuppressedInternal<String>) Suppressed.<String>untilTimeLimit(Duration.ofDays(100), maxRecords(1)),
                storeName,
                mock
            ).get();
        streamsConfig.setProperty(StreamsConfig.BUILT_IN_METRICS_VERSION_CONFIG, builtInMetricsVersion);
        final MockInternalProcessorContext context =
            new MockInternalProcessorContext(streamsConfig, TASK_ID, TestUtils.tempDirectory());
        final Time time = new SystemTime();
        context.setCurrentNode(new ProcessorNode("testNode"));
        context.setSystemTimeMs(time.milliseconds());
        buffer.init(context, buffer);
        processor.init(context);
        final long timestamp = 100L;
        context.setRecordMetadata("", 0, 0L, null, timestamp);
        final String key = "longKey";
        final Change<Long> value = new Change<>(null, ARBITRARY_LONG);
        processor.process(key, value);
        // Select the expected metric names for the configured metrics version.
        final MetricName evictionRateMetric =
            StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion) ? evictionRateMetric0100To24 : evictionRateMetricLatest;
        final MetricName evictionTotalMetric =
            StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion) ? evictionTotalMetric0100To24 : evictionTotalMetricLatest;
        final MetricName bufferSizeAvgMetric =
            StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion) ? bufferSizeAvgMetric0100To24 : bufferSizeAvgMetricLatest;
        final MetricName bufferSizeMaxMetric =
            StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion) ? bufferSizeMaxMetric0100To24 : bufferSizeMaxMetricLatest;
        final MetricName bufferCountAvgMetric =
            StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion) ? bufferCountAvgMetric0100To24 : bufferCountAvgMetricLatest;
        final MetricName bufferCountMaxMetric =
            StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion) ? bufferCountMaxMetric0100To24 : bufferCountMaxMetricLatest;
        // After the first record: one buffered record, nothing evicted yet.
        // (Exact size values depend on the buffer's serialized-record overhead —
        // confirm against InMemoryTimeOrderedKeyValueBuffer if they drift.)
        {
            final Map<MetricName, ? extends Metric> metrics = context.metrics().metrics();
            verifyMetric(metrics, evictionRateMetric, is(0.0));
            verifyMetric(metrics, evictionTotalMetric, is(0.0));
            verifyMetric(metrics, bufferSizeAvgMetric, is(21.5));
            verifyMetric(metrics, bufferSizeMaxMetric, is(43.0));
            verifyMetric(metrics, bufferCountAvgMetric, is(0.5));
            verifyMetric(metrics, bufferCountMaxMetric, is(1.0));
            if (StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion)) {
                verifyMetric(metrics, bufferSizeCurrentMetric, is(43.0));
                verifyMetric(metrics, bufferCountCurrentMetric, is(1.0));
            }
        }
        context.setRecordMetadata("", 0, 1L, null, timestamp + 1);
        processor.process("key", value);
        // After the second record: maxRecords(1) forces an eviction, so the emit
        // metrics move and the max buffer count briefly reached 2.
        {
            final Map<MetricName, ? extends Metric> metrics = context.metrics().metrics();
            verifyMetric(metrics, evictionRateMetric, greaterThan(0.0));
            verifyMetric(metrics, evictionTotalMetric, is(1.0));
            verifyMetric(metrics, bufferSizeAvgMetric, is(41.0));
            verifyMetric(metrics, bufferSizeMaxMetric, is(82.0));
            verifyMetric(metrics, bufferCountAvgMetric, is(1.0));
            verifyMetric(metrics, bufferCountMaxMetric, is(2.0));
            if (StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion)) {
                verifyMetric(metrics, bufferSizeCurrentMetric, is(39.0));
                verifyMetric(metrics, bufferCountCurrentMetric, is(1.0));
            }
        }
    }
    // Asserts both the registered metric's description and its current value.
    @SuppressWarnings("unchecked")
    private static <T> void verifyMetric(final Map<MetricName, ? extends Metric> metrics,
                                         final MetricName metricName,
                                         final Matcher<T> matcher) {
        assertThat(metrics.get(metricName).metricName().description(), is(metricName.description()));
        assertThat((T) metrics.get(metricName).metricValue(), matcher);
    }
}
| |
/*
* Copyright (C) 2006 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nmj.views;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.view.Gravity;
import android.widget.LinearLayout;
import com.nmj.nmjmanager.R;
public class ForegroundLinearLayout extends LinearLayout {
    // Drawable rendered on top of all children; null means no foreground.
    private Drawable mForeground;
    // Scratch rects reused across draw() calls to avoid per-frame allocation.
    private final Rect mSelfBounds = new Rect();
    private final Rect mOverlayBounds = new Rect();
    private int mForegroundGravity = Gravity.FILL;
    // When true, the foreground is positioned over the padded area as well.
    protected boolean mForegroundInPadding = true;
    // Set when layout/size changes so draw() recomputes the foreground bounds lazily.
    boolean mForegroundBoundsChanged = false;
    public ForegroundLinearLayout(Context context) {
        super(context);
    }
    public ForegroundLinearLayout(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }
    // Reads android:foreground, android:foregroundGravity and
    // android:foregroundInsidePadding from the styled attributes.
    public ForegroundLinearLayout(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.ForegroundLinearLayout,
                defStyle, 0);
        mForegroundGravity = a.getInt(
                R.styleable.ForegroundLinearLayout_android_foregroundGravity, mForegroundGravity);
        final Drawable d = a.getDrawable(R.styleable.ForegroundLinearLayout_android_foreground);
        if (d != null) {
            setForeground(d);
        }
        mForegroundInPadding = a.getBoolean(
                R.styleable.ForegroundLinearLayout_android_foregroundInsidePadding, true);
        a.recycle();
    }
    /**
     * Describes how the foreground is positioned.
     *
     * @return foreground gravity.
     *
     * @see #setForegroundGravity(int)
     */
    public int getForegroundGravity() {
        return mForegroundGravity;
    }
    /**
     * Describes how the foreground is positioned. Defaults to START and TOP.
     *
     * @param foregroundGravity See {@link android.view.Gravity}
     *
     * @see #getForegroundGravity()
     */
    public void setForegroundGravity(int foregroundGravity) {
        if (mForegroundGravity != foregroundGravity) {
            // Fill in missing axes with the defaults (START / TOP).
            if ((foregroundGravity & Gravity.RELATIVE_HORIZONTAL_GRAVITY_MASK) == 0) {
                foregroundGravity |= Gravity.START;
            }
            if ((foregroundGravity & Gravity.VERTICAL_GRAVITY_MASK) == 0) {
                foregroundGravity |= Gravity.TOP;
            }
            mForegroundGravity = foregroundGravity;
            if (mForegroundGravity == Gravity.FILL && mForeground != null) {
                // NOTE(review): the padding rect is computed but never used here.
                // This mirrors FrameLayout, where the padding affected measurement;
                // it appears to be dead code in this copy — confirm before removing.
                Rect padding = new Rect();
                mForeground.getPadding(padding);
            }
            requestLayout();
        }
    }
    @Override
    protected boolean verifyDrawable(Drawable who) {
        return super.verifyDrawable(who) || (who == mForeground);
    }
    @Override
    public void jumpDrawablesToCurrentState() {
        super.jumpDrawablesToCurrentState();
        if (mForeground != null) mForeground.jumpToCurrentState();
    }
    @Override
    protected void drawableStateChanged() {
        super.drawableStateChanged();
        // Keep a stateful foreground (e.g. pressed/focused selectors) in sync.
        if (mForeground != null && mForeground.isStateful()) {
            mForeground.setState(getDrawableState());
        }
    }
    /**
     * Supply a Drawable that is to be rendered on top of all of the child
     * views in the frame layout. Any padding in the Drawable will be taken
     * into account by ensuring that the children are inset to be placed
     * inside of the padding area.
     *
     * @param drawable The Drawable to be drawn on top of the children.
     */
    public void setForeground(Drawable drawable) {
        if (mForeground != drawable) {
            // Detach the old drawable before swapping in the new one.
            if (mForeground != null) {
                mForeground.setCallback(null);
                unscheduleDrawable(mForeground);
            }
            mForeground = drawable;
            if (drawable != null) {
                // A foreground means this view must take part in drawing.
                setWillNotDraw(false);
                drawable.setCallback(this);
                if (drawable.isStateful()) {
                    drawable.setState(getDrawableState());
                }
                if (mForegroundGravity == Gravity.FILL) {
                    // NOTE(review): padding computed but unused, same as in
                    // setForegroundGravity() — likely a FrameLayout copy artifact.
                    Rect padding = new Rect();
                    drawable.getPadding(padding);
                }
            } else {
                setWillNotDraw(true);
            }
            requestLayout();
            invalidate();
        }
    }
    /**
     * Returns the drawable used as the foreground of this FrameLayout. The
     * foreground drawable, if non-null, is always drawn on top of the children.
     *
     * @return A Drawable or null if no foreground was set.
     */
    public Drawable getForeground() {
        return mForeground;
    }
    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        super.onLayout(changed, left, top, right, bottom);
        // NOTE(review): this assigns rather than ORs, so an unchanged layout pass
        // can clear a pending flag set by onSizeChanged() — AOSP FrameLayout uses
        // "|= changed"; confirm whether that matters for this layout's usage.
        mForegroundBoundsChanged = changed;
    }
    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        mForegroundBoundsChanged = true;
    }
    @Override
    public void draw(Canvas canvas) {
        super.draw(canvas);
        if (mForeground != null) {
            final Drawable foreground = mForeground;
            // Recompute foreground bounds only after a layout/size change.
            if (mForegroundBoundsChanged) {
                mForegroundBoundsChanged = false;
                final Rect selfBounds = mSelfBounds;
                final Rect overlayBounds = mOverlayBounds;
                final int w = getRight() - getLeft();
                final int h = getBottom() - getTop();
                if (mForegroundInPadding) {
                    selfBounds.set(0, 0, w, h);
                } else {
                    selfBounds.set(getPaddingLeft(), getPaddingTop(),
                            w - getPaddingRight(), h - getPaddingBottom());
                }
                // Position the drawable within selfBounds according to gravity.
                Gravity.apply(mForegroundGravity, foreground.getIntrinsicWidth(),
                        foreground.getIntrinsicHeight(), selfBounds, overlayBounds);
                foreground.setBounds(overlayBounds);
            }
            foreground.draw(canvas);
        }
    }
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.process.workitem;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.jbpm.process.core.datatype.impl.type.EnumDataType;
import org.jbpm.process.core.datatype.impl.type.ListDataType;
import org.jbpm.process.core.datatype.impl.type.StringDataType;
import org.jbpm.process.workitem.enums.AnimalsEnum;
import org.jbpm.process.workitem.enums.CarsEnum;
import org.jbpm.test.util.AbstractBaseTest;
import org.junit.Test;
import static org.junit.Assert.*;
public class WorkDefinitionImplTest extends AbstractBaseTest {
@Test
public void testServices() throws Exception {
Map<String, WorkDefinitionImpl> repoResults = WorkItemRepository.getWorkDefinitions(getClass().getResource("repository").toURI().toString());
assertNotNull(repoResults);
assertFalse(repoResults.isEmpty());
assertEquals(repoResults.size(), 9);
WorkDefinitionImpl testServiceOne = repoResults.get("TestServiceOne");
assertNotNull(testServiceOne);
assertEquals("TestServiceOne", testServiceOne.getName());
assertEquals("TestServiceOne", testServiceOne.getDisplayName());
assertEquals("Test Service One", testServiceOne.getDescription());
assertEquals(3, testServiceOne.getParameters().size());
assertEquals("testserviceone.png", testServiceOne.getIcon());
assertEquals("MyTestServices", testServiceOne.getCategory());
assertEquals(0, testServiceOne.getDependencies().length);
assertEquals("MyTestServices", testServiceOne.getCategory());
assertEquals("mvel", testServiceOne.getWidType());
WorkDefinitionImpl testServiceTwo = repoResults.get("TestServiceTwo");
assertNotNull(testServiceTwo);
assertEquals(2, testServiceTwo.getResults().size());
assertTrue(testServiceTwo.getResult("result1").getType() instanceof StringDataType);
assertTrue(testServiceTwo.getResult("result2").getType() instanceof StringDataType);
assertEquals("mvel", testServiceTwo.getWidType());
WorkDefinitionImpl testServiceThree = repoResults.get("TestServiceThree");
assertNotNull(testServiceThree);
assertEquals("1.0", testServiceThree.getVersion());
assertEquals("org.drools.eclipse.flow.common.editor.editpart.work.SampleCustomEditor", testServiceThree.getCustomEditor());
assertEquals("org.jbpm.process.workitem.MyHandler", testServiceThree.getDefaultHandler());
assertEquals(2, testServiceThree.getDependencies().length);
assertEquals(2, testServiceThree.getMavenDependencies().length);
assertEquals("mvel", testServiceThree.getWidType());
// service defined as json wids
WorkDefinitionImpl testServiceFour = repoResults.get("TestServiceFour");
assertNotNull(testServiceFour);
assertEquals("TestServiceFour", testServiceFour.getName());
assertEquals("TestServiceFour", testServiceFour.getDisplayName());
assertEquals("Test Service Four", testServiceFour.getDescription());
assertEquals(3, testServiceFour.getParameters().size());
assertEquals(0, testServiceFour.getResults().size());
assertEquals("1.0", testServiceFour.getVersion());
assertEquals(2, testServiceFour.getDependencies().length);
assertEquals("json", testServiceFour.getWidType());
WorkDefinitionImpl testServiceFour2 = repoResults.get("TestServiceFour2");
assertNotNull(testServiceFour2);
assertEquals("TestServiceFour2", testServiceFour2.getName());
assertEquals("TestServiceFour2", testServiceFour2.getDisplayName());
assertEquals("Test Service Four2", testServiceFour2.getDescription());
assertEquals(2, testServiceFour2.getParameters().size());
assertEquals(2, testServiceFour2.getResults().size());
assertTrue(testServiceFour2.getResult("c").getType() instanceof ListDataType);
assertTrue(testServiceFour2.getResult("d").getType() instanceof StringDataType);
assertEquals("2.0", testServiceFour2.getVersion());
assertEquals(3, testServiceFour2.getDependencies().length);
assertEquals(2, testServiceFour2.getMavenDependencies().length);
assertEquals("json", testServiceFour2.getWidType());
// workitem with no dependency defined
WorkDefinitionImpl testServiceFive = repoResults.get("TestServiceFive");
assertNotNull(testServiceFive);
assertEquals("TestServiceFive", testServiceFive.getName());
assertEquals("TestServiceFive", testServiceFive.getDisplayName());
assertNull(testServiceFive.getDependencies());
}
/**
 * Verifies the "TestServiceWithParamValues" work item definition, whose two
 * parameter values ("param1" and "param3") are both enum-backed: each value
 * must be an {@link EnumDataType} exposing the expected enum constants.
 */
@Test
public void testParameterValuesWithEnumsOnly() throws Exception {
    Map<String, WorkDefinitionImpl> repoResults = WorkItemRepository.getWorkDefinitions(getClass().getResource("repository").toURI().toString());
    assertNotNull(repoResults);
    assertFalse(repoResults.isEmpty());
    // assertEquals takes (expected, actual) — keep that order so failure
    // messages read correctly.
    assertEquals(9, repoResults.size());
    WorkDefinitionImpl testServiceWithParamValues = repoResults.get("TestServiceWithParamValues");
    assertNotNull(testServiceWithParamValues);
    assertNotNull(testServiceWithParamValues.getParameterValues());
    Map<String, Object> parameterValues = testServiceWithParamValues.getParameterValues();
    assertNotNull(parameterValues);
    assertEquals(2, parameterValues.size());
    for (Map.Entry<String, Object> entry : parameterValues.entrySet()) {
        assertTrue(entry.getValue() instanceof EnumDataType);
        if (entry.getKey().equals("param1")) {
            EnumDataType paramEnum = (EnumDataType) entry.getValue();
            assertEquals("org.jbpm.process.workitem.enums.AnimalsEnum", paramEnum.getClassName());
            Map<String, Object> paramValuesMap = paramEnum.getValueMap(null);
            assertNotNull(paramValuesMap);
            assertEquals(5, paramValuesMap.size());
            assertTrue(paramValuesMap.containsKey("DOGS"));
            assertTrue(paramValuesMap.containsKey("CATS"));
            assertTrue(paramValuesMap.containsKey("ELEPHANTS"));
            assertTrue(paramValuesMap.containsKey("GIRAFFES"));
            assertTrue(paramValuesMap.containsKey("BIRDS"));
            assertEquals(AnimalsEnum.DOGS, paramValuesMap.get("DOGS"));
            assertEquals(AnimalsEnum.CATS, paramValuesMap.get("CATS"));
            assertEquals(AnimalsEnum.ELEPHANTS, paramValuesMap.get("ELEPHANTS"));
            assertEquals(AnimalsEnum.GIRAFFES, paramValuesMap.get("GIRAFFES"));
            assertEquals(AnimalsEnum.BIRDS, paramValuesMap.get("BIRDS"));
        } else if (entry.getKey().equals("param3")) {
            EnumDataType paramEnum = (EnumDataType) entry.getValue();
            assertEquals("org.jbpm.process.workitem.enums.CarsEnum", paramEnum.getClassName());
            Map<String, Object> paramValuesMap = paramEnum.getValueMap(null);
            assertNotNull(paramValuesMap);
            assertEquals(5, paramValuesMap.size());
            assertTrue(paramValuesMap.containsKey("HONDA"));
            assertTrue(paramValuesMap.containsKey("MAZDA"));
            assertTrue(paramValuesMap.containsKey("NISSAN"));
            assertTrue(paramValuesMap.containsKey("TOYOTA"));
            assertTrue(paramValuesMap.containsKey("FORD"));
            assertEquals(CarsEnum.HONDA, paramValuesMap.get("HONDA"));
            assertEquals(CarsEnum.MAZDA, paramValuesMap.get("MAZDA"));
            assertEquals(CarsEnum.NISSAN, paramValuesMap.get("NISSAN"));
            assertEquals(CarsEnum.TOYOTA, paramValuesMap.get("TOYOTA"));
            assertEquals(CarsEnum.FORD, paramValuesMap.get("FORD"));
        } else {
            fail("invalid parameter name");
        }
    }
}
/**
 * Verifies the "TestServiceWithParamValuesTwo" work item definition, whose two
 * parameter values are comma-separated Strings rather than enums.
 */
@Test
public void testParameterValuesWithStringsOnly() throws Exception {
    Map<String, WorkDefinitionImpl> repoResults = WorkItemRepository.getWorkDefinitions(getClass().getResource("repository").toURI().toString());
    assertNotNull(repoResults);
    assertFalse(repoResults.isEmpty());
    // assertEquals takes (expected, actual) — keep that order so failure
    // messages read correctly.
    assertEquals(9, repoResults.size());
    WorkDefinitionImpl testServiceWithParamValuesTwo = repoResults.get("TestServiceWithParamValuesTwo");
    assertNotNull(testServiceWithParamValuesTwo);
    assertNotNull(testServiceWithParamValuesTwo.getParameterValues());
    Map<String, Object> parameterValues = testServiceWithParamValuesTwo.getParameterValues();
    assertNotNull(parameterValues);
    assertEquals(2, parameterValues.size());
    for (Map.Entry<String, Object> entry : parameterValues.entrySet()) {
        assertTrue(entry.getValue() instanceof String);
        assertNotNull(entry.getValue());
        if (entry.getKey().equals("param1")) {
            String paramValue = (String) entry.getValue();
            List<String> paramValueList = Arrays.asList(paramValue.split(","));
            assertNotNull(paramValueList);
            assertEquals(3, paramValueList.size());
            assertTrue(paramValueList.contains("one"));
            assertTrue(paramValueList.contains("two"));
            assertTrue(paramValueList.contains("three"));
        } else if (entry.getKey().equals("param3")) {
            String paramValue = (String) entry.getValue();
            List<String> paramValueList = Arrays.asList(paramValue.split(","));
            assertNotNull(paramValueList);
            assertEquals(3, paramValueList.size());
            assertTrue(paramValueList.contains("four"));
            assertTrue(paramValueList.contains("five"));
            assertTrue(paramValueList.contains("six"));
        } else {
            fail("invalid parameter name");
        }
    }
}
/**
 * Verifies the "TestServiceWithParamValuesThree" work item definition, which
 * mixes both parameter-value styles: "param1" is enum-backed while "param3"
 * is a comma-separated String, e.g. in the wid:
 * <pre>
 * "parameterValues" : [
 *     "param1" : new EnumDataType("org.jbpm.process.workitem.enums.AnimalsEnum"),
 *     "param3" : "one, two, three"
 * ],
 * </pre>
 */
@Test
public void testParameterValuesWithStringsAndEnums() throws Exception {
    Map<String, WorkDefinitionImpl> repoResults = WorkItemRepository.getWorkDefinitions(getClass().getResource("repository").toURI().toString());
    assertNotNull(repoResults);
    assertFalse(repoResults.isEmpty());
    // assertEquals takes (expected, actual) — keep that order so failure
    // messages read correctly.
    assertEquals(9, repoResults.size());
    WorkDefinitionImpl testServiceWithParamValuesThree = repoResults.get("TestServiceWithParamValuesThree");
    assertNotNull(testServiceWithParamValuesThree);
    assertNotNull(testServiceWithParamValuesThree.getParameterValues());
    Map<String, Object> parameterValues = testServiceWithParamValuesThree.getParameterValues();
    assertNotNull(parameterValues);
    assertEquals(2, parameterValues.size());
    for (Map.Entry<String, Object> entry : parameterValues.entrySet()) {
        assertNotNull(entry.getValue());
        if (entry.getKey().equals("param1")) {
            assertTrue(entry.getValue() instanceof EnumDataType);
            EnumDataType paramEnum = (EnumDataType) entry.getValue();
            assertEquals("org.jbpm.process.workitem.enums.AnimalsEnum", paramEnum.getClassName());
            Map<String, Object> paramValuesMap = paramEnum.getValueMap(null);
            assertNotNull(paramValuesMap);
            assertEquals(5, paramValuesMap.size());
            assertTrue(paramValuesMap.containsKey("DOGS"));
            assertTrue(paramValuesMap.containsKey("CATS"));
            assertTrue(paramValuesMap.containsKey("ELEPHANTS"));
            assertTrue(paramValuesMap.containsKey("GIRAFFES"));
            assertTrue(paramValuesMap.containsKey("BIRDS"));
            assertEquals(AnimalsEnum.DOGS, paramValuesMap.get("DOGS"));
            assertEquals(AnimalsEnum.CATS, paramValuesMap.get("CATS"));
            assertEquals(AnimalsEnum.ELEPHANTS, paramValuesMap.get("ELEPHANTS"));
            assertEquals(AnimalsEnum.GIRAFFES, paramValuesMap.get("GIRAFFES"));
            assertEquals(AnimalsEnum.BIRDS, paramValuesMap.get("BIRDS"));
        } else if (entry.getKey().equals("param3")) {
            assertTrue(entry.getValue() instanceof String);
            String paramValue = (String) entry.getValue();
            List<String> paramValueList = Arrays.asList(paramValue.split(","));
            assertNotNull(paramValueList);
            assertEquals(3, paramValueList.size());
            assertTrue(paramValueList.contains("one"));
            assertTrue(paramValueList.contains("two"));
            assertTrue(paramValueList.contains("three"));
        } else {
            fail("invalid parameter name");
        }
    }
}
}
| |
package org.wikidata.wdtk.storage.datastructures;
/*
* #%L
* Wikidata Toolkit Data Model
* %%
* Copyright (C) 2014 Wikidata Toolkit Developers
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.util.Iterator;
/**
* Default implementation of {@link RankedBitVector}. This implementation uses
* auxiliary classes to have efficient performance for the methods of a ranked
* bit vector. Hence, {@link #countBits(boolean, long)} uses an instance of
* {@link CountBitsArray} and {@link #findPosition(boolean, long)} uses two
* instances of {@link FindPositionArray}.
*
* @see CountBitsArray
*
* @see FindPositionArray
*
* @author Julian Mendez
*/
/**
 * Default implementation of {@link RankedBitVector}. This implementation uses
 * auxiliary classes to have efficient performance for the methods of a ranked
 * bit vector. Hence, {@link #countBits(boolean, long)} uses an instance of
 * {@link CountBitsArray} and {@link #findPosition(boolean, long)} uses two
 * instances of {@link FindPositionArray}.
 *
 * @see CountBitsArray
 *
 * @see FindPositionArray
 *
 * @author Julian Mendez
 */
public class RankedBitVectorImpl implements RankedBitVector, Iterable<Boolean> {

    static final int defaultCountBitsBlockSize = 0x400;
    static final int defaultFindPositionBlockSize = 0x2000;

    // Backing bit storage; all reads and writes delegate to this vector.
    final BitVectorImpl bitVector;
    // Auxiliary structure answering countBits() queries efficiently.
    final CountBitsArray countBitsArray;
    // Auxiliary structures answering findPosition() queries, one per bit value.
    final FindPositionArray findPositionOfFalse;
    final FindPositionArray findPositionOfTrue;

    /**
     * Constructor of a ranked bit vector of size 0.
     */
    public RankedBitVectorImpl() {
        this.bitVector = new BitVectorImpl();
        this.countBitsArray = new CountBitsArray(this.bitVector,
                defaultCountBitsBlockSize);
        this.findPositionOfFalse = new FindPositionArray(this.bitVector, false,
                defaultFindPositionBlockSize);
        this.findPositionOfTrue = new FindPositionArray(this.bitVector, true,
                defaultFindPositionBlockSize);
    }

    /**
     * Copy constructor of a ranked bit vector.
     *
     * @param bitVector
     *            bit vector
     */
    public RankedBitVectorImpl(BitVector bitVector) {
        this.bitVector = new BitVectorImpl(bitVector);
        // Preserve the source's count block size when copying another
        // RankedBitVectorImpl; otherwise fall back to the default.
        if (bitVector instanceof RankedBitVectorImpl) {
            this.countBitsArray = new CountBitsArray(this.bitVector,
                    ((RankedBitVectorImpl) bitVector).countBitsArray
                            .getBlockSize());
        } else {
            this.countBitsArray = new CountBitsArray(this.bitVector,
                    defaultCountBitsBlockSize);
        }
        this.findPositionOfFalse = new FindPositionArray(this.bitVector, false,
                defaultFindPositionBlockSize);
        this.findPositionOfTrue = new FindPositionArray(this.bitVector, true,
                defaultFindPositionBlockSize);
    }

    /**
     * Constructor of a ranked bit vector of size <i>initialSize</i>. The bit
     * vector contains <code>false</code> at all indexes.
     *
     * @param initialSize
     *            initial size of this ranked bit vector
     */
    public RankedBitVectorImpl(long initialSize) {
        // Delegate to the fully-parameterized constructor to avoid duplicating
        // the initialization of the auxiliary arrays.
        this(initialSize, defaultCountBitsBlockSize,
                defaultFindPositionBlockSize);
    }

    /**
     * Constructor of a ranked bit vector of size <i>initialSize</i> and block
     * size <i>blockSize</i>. The bit vector contains <code>false</code> at all
     * indexes.
     *
     * @param initialSize
     *            initial size of this ranked bit vector
     * @param countBlockSize
     *            block size to count number of occurrences of a value; this
     *            value must be a positive number
     * @param findPositionBlockSize
     *            block size to find the position of the <i>n</i>-th occurrence
     *            of a value; this value must be greater than or equal to 64
     * @throws IllegalArgumentException
     *             if any of the block sizes is too small
     */
    public RankedBitVectorImpl(long initialSize, int countBlockSize,
            int findPositionBlockSize) {
        this.bitVector = new BitVectorImpl(initialSize);
        this.countBitsArray = new CountBitsArray(this.bitVector, countBlockSize);
        this.findPositionOfFalse = new FindPositionArray(this.bitVector, false,
                findPositionBlockSize);
        this.findPositionOfTrue = new FindPositionArray(this.bitVector, true,
                findPositionBlockSize);
    }

    @Override
    public boolean addBit(boolean bit) {
        boolean ret = this.bitVector.addBit(bit);
        // The auxiliary arrays cache derived data, so they must be told about
        // every mutation of the underlying vector.
        notifyObservers();
        return ret;
    }

    @Override
    public long countBits(boolean bit, long position) {
        return this.countBitsArray.countBits(bit, position);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof BitVector)) {
            return false;
        }
        // Equality is defined purely by bit content, so delegate to the
        // backing vector; auxiliary arrays are derived state.
        return this.bitVector.equals(obj);
    }

    @Override
    public long findPosition(boolean bit, long nOccurrence) {
        if (nOccurrence <= 0) {
            return NOT_FOUND;
        }
        return bit ? this.findPositionOfTrue.findPosition(nOccurrence)
                : this.findPositionOfFalse.findPosition(nOccurrence);
    }

    @Override
    public boolean getBit(long position) {
        return this.bitVector.getBit(position);
    }

    @Override
    public int hashCode() {
        // Must stay consistent with equals(), which delegates to bitVector.
        return this.bitVector.hashCode();
    }

    @Override
    public Iterator<Boolean> iterator() {
        return this.bitVector.iterator();
    }

    /**
     * Invalidates the cached state of the auxiliary count/find structures
     * after the underlying bit vector has been modified.
     */
    void notifyObservers() {
        this.countBitsArray.update();
        this.findPositionOfFalse.update();
        this.findPositionOfTrue.update();
    }

    @Override
    public void setBit(long position, boolean bit) {
        boolean oldBit = getBit(position);
        // Only touch the vector (and invalidate the caches) on a real change.
        if (oldBit != bit) {
            this.bitVector.setBit(position, bit);
            notifyObservers();
        }
    }

    @Override
    public long size() {
        return this.bitVector.size();
    }

    @Override
    public String toString() {
        return this.bitVector.toString();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hive.hcatalog.data.schema;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Schema;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema.Type;
/**
 * Utility methods for converting between the Hive metastore schema types
 * ({@link FieldSchema}, {@link Schema}, {@link TypeInfo}) and HCatalog's
 * {@link HCatSchema}/{@link HCatFieldSchema} representations.
 */
public class HCatSchemaUtils {

  // A struct schema is built by repeated addField() calls.
  public static CollectionBuilder getStructSchemaBuilder() {
    return new CollectionBuilder();
  }

  // A list schema is built by a single addField() call for the element type.
  public static CollectionBuilder getListSchemaBuilder() {
    return new CollectionBuilder();
  }

  public static MapBuilder getMapSchemaBuilder() {
    return new MapBuilder();
  }

  /** Common base for the schema builders below. */
  public static abstract class HCatSchemaBuilder {
    public abstract HCatSchema build() throws HCatException;
  }

  public static class CollectionBuilder extends HCatSchemaBuilder { // for STRUCTS(multiple-add-calls) and LISTS(single-add-call)
    List<HCatFieldSchema> fieldSchemas = null;

    CollectionBuilder() {
      fieldSchemas = new ArrayList<HCatFieldSchema>();
    }

    public CollectionBuilder addField(FieldSchema fieldSchema) throws HCatException {
      // Convert the metastore field first, then reuse the HCat overload.
      return this.addField(getHCatFieldSchema(fieldSchema));
    }

    public CollectionBuilder addField(HCatFieldSchema fieldColumnSchema) {
      fieldSchemas.add(fieldColumnSchema);
      return this;
    }

    @Override
    public HCatSchema build() throws HCatException {
      return new HCatSchema(fieldSchemas);
    }
  }

  /** Builds a single-field schema describing a map's key type and value schema. */
  public static class MapBuilder extends HCatSchemaBuilder {
    PrimitiveTypeInfo keyType = null;
    HCatSchema valueSchema = null;

    @Override
    public HCatSchema build() throws HCatException {
      List<HCatFieldSchema> fslist = new ArrayList<HCatFieldSchema>();
      fslist.add(HCatFieldSchema.createMapTypeFieldSchema(null, keyType, valueSchema, null));
      return new HCatSchema(fslist);
    }

    public MapBuilder withValueSchema(HCatSchema valueSchema) {
      this.valueSchema = valueSchema;
      return this;
    }

    public MapBuilder withKeyType(PrimitiveTypeInfo keyType) {
      this.keyType = keyType;
      return this;
    }
  }

  /**
   * Convert a metastore FieldSchema to an HCatFieldSchema.
   * @param fs FieldSchema to convert
   * @return HCatFieldSchema representation of the FieldSchema
   * @throws HCatException if the field's type cannot be represented
   */
  public static HCatFieldSchema getHCatFieldSchema(FieldSchema fs) throws HCatException {
    String fieldName = fs.getName();
    TypeInfo baseTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(fs.getType());
    return getHCatFieldSchema(fieldName, baseTypeInfo);
  }

  // Recursively converts a TypeInfo tree to an HCatFieldSchema, descending into
  // struct members, list elements and map values.
  private static HCatFieldSchema getHCatFieldSchema(String fieldName, TypeInfo fieldTypeInfo) throws HCatException {
    Category typeCategory = fieldTypeInfo.getCategory();
    HCatFieldSchema hCatFieldSchema;
    if (Category.PRIMITIVE == typeCategory) {
      hCatFieldSchema = new HCatFieldSchema(fieldName, (PrimitiveTypeInfo)fieldTypeInfo, null);
    } else if (Category.STRUCT == typeCategory) {
      HCatSchema subSchema = constructHCatSchema((StructTypeInfo) fieldTypeInfo);
      hCatFieldSchema = new HCatFieldSchema(fieldName, HCatFieldSchema.Type.STRUCT, subSchema, null);
    } else if (Category.LIST == typeCategory) {
      HCatSchema subSchema = getHCatSchema(((ListTypeInfo) fieldTypeInfo).getListElementTypeInfo());
      hCatFieldSchema = new HCatFieldSchema(fieldName, HCatFieldSchema.Type.ARRAY, subSchema, null);
    } else if (Category.MAP == typeCategory) {
      HCatSchema subSchema = getHCatSchema(((MapTypeInfo) fieldTypeInfo).getMapValueTypeInfo());
      hCatFieldSchema = HCatFieldSchema.createMapTypeFieldSchema(fieldName,
        (PrimitiveTypeInfo)((MapTypeInfo)fieldTypeInfo).getMapKeyTypeInfo(), subSchema, null);
    } else {
      // UNION and any future categories are not representable in HCatalog.
      throw new TypeNotPresentException(fieldTypeInfo.getTypeName(), null);
    }
    return hCatFieldSchema;
  }

  /** Converts a full metastore Schema (field list) to an HCatSchema. */
  public static HCatSchema getHCatSchema(Schema schema) throws HCatException {
    return getHCatSchema(schema.getFieldSchemas());
  }

  /** Converts a list of metastore FieldSchemas to a struct-style HCatSchema. */
  public static HCatSchema getHCatSchema(List<? extends FieldSchema> fslist) throws HCatException {
    CollectionBuilder builder = getStructSchemaBuilder();
    for (FieldSchema fieldSchema : fslist) {
      builder.addField(fieldSchema);
    }
    return builder.build();
  }

  // Converts every member of a Hive struct type into a field of an HCatSchema.
  private static HCatSchema constructHCatSchema(StructTypeInfo stypeInfo) throws HCatException {
    CollectionBuilder builder = getStructSchemaBuilder();
    for (String fieldName : stypeInfo.getAllStructFieldNames()) {
      builder.addField(getHCatFieldSchema(fieldName, stypeInfo.getStructFieldTypeInfo(fieldName)));
    }
    return builder.build();
  }

  /**
   * Converts a single TypeInfo to an HCatSchema. Primitives and structs are
   * wrapped in a one-field struct schema; lists and maps use their dedicated
   * schema shapes.
   * @throws TypeNotPresentException for categories HCatalog cannot represent
   */
  public static HCatSchema getHCatSchema(TypeInfo typeInfo) throws HCatException {
    Category typeCategory = typeInfo.getCategory();
    HCatSchema hCatSchema;
    if (Category.PRIMITIVE == typeCategory) {
      hCatSchema = getStructSchemaBuilder().addField(new HCatFieldSchema(null, (PrimitiveTypeInfo)typeInfo, null)).build();
    } else if (Category.STRUCT == typeCategory) {
      HCatSchema subSchema = constructHCatSchema((StructTypeInfo) typeInfo);
      hCatSchema = getStructSchemaBuilder().addField(new HCatFieldSchema(null, Type.STRUCT, subSchema, null)).build();
    } else if (Category.LIST == typeCategory) {
      CollectionBuilder builder = getListSchemaBuilder();
      builder.addField(getHCatFieldSchema(null, ((ListTypeInfo) typeInfo).getListElementTypeInfo()));
      // Lists are wrapped in an anonymous ARRAY field (empty name/comment).
      hCatSchema = new HCatSchema(Arrays.asList(new HCatFieldSchema("", Type.ARRAY, builder.build(), "")));
    } else if (Category.MAP == typeCategory) {
      HCatSchema subSchema = getHCatSchema(((MapTypeInfo) typeInfo).getMapValueTypeInfo());
      MapBuilder builder = getMapSchemaBuilder();
      hCatSchema = builder.withKeyType((PrimitiveTypeInfo)((MapTypeInfo) typeInfo).getMapKeyTypeInfo())
        .withValueSchema(subSchema).build();
    } else {
      throw new TypeNotPresentException(typeInfo.getTypeName(), null);
    }
    return hCatSchema;
  }

  /** Parses a Hive type string (e.g. "array&lt;int&gt;") into an HCatSchema. */
  public static HCatSchema getHCatSchemaFromTypeString(String typeString) throws HCatException {
    return getHCatSchema(TypeInfoUtils.getTypeInfoFromTypeString(typeString));
  }

  /**
   * Parses a comma-separated "name type" column list (e.g. "a int,b string")
   * into an HCatSchema; a null/blank string yields an empty schema.
   */
  public static HCatSchema getHCatSchema(String schemaString) throws HCatException {
    if ((schemaString == null) || (schemaString.trim().isEmpty())) {
      return new HCatSchema(new ArrayList<HCatFieldSchema>()); // empty HSchema construct
    }
    // Wrap in a struct so the standard type-string parser can handle it, then
    // unwrap the outer struct again.
    HCatSchema outerSchema = getHCatSchemaFromTypeString("struct<" + schemaString + ">");
    return outerSchema.get(0).getStructSubSchema();
  }

  /** Converts an HCatFieldSchema back to a metastore FieldSchema. */
  public static FieldSchema getFieldSchema(HCatFieldSchema hcatFieldSchema) {
    return new FieldSchema(hcatFieldSchema.getName(), hcatFieldSchema.getTypeString(), hcatFieldSchema.getComment());
  }

  /** Converts a list of HCatFieldSchemas back to metastore FieldSchemas. */
  public static List<FieldSchema> getFieldSchemas(List<HCatFieldSchema> hcatFieldSchemas) {
    List<FieldSchema> lfs = new ArrayList<FieldSchema>();
    for (HCatFieldSchema hfs : hcatFieldSchemas) {
      lfs.add(getFieldSchema(hfs));
    }
    return lfs;
  }
}
| |
package com.rest.client.app.activities;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import android.app.Activity;
import android.app.SearchManager;
import android.app.SearchableInfo;
import android.content.Intent;
import android.databinding.DataBindingUtil;
import android.os.Bundle;
import android.provider.SearchRecentSuggestions;
import android.support.design.widget.Snackbar;
import android.support.v4.app.ActivityCompat;
import android.support.v4.view.MenuItemCompat;
import android.support.v4.view.ViewCompat;
import android.support.v4.widget.SwipeRefreshLayout.OnRefreshListener;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.SearchView;
import android.support.v7.widget.SearchView.OnQueryTextListener;
import android.text.Html;
import android.text.TextUtils;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.inputmethod.InputMethodManager;
import com.chopping.activities.RestfulActivity;
import com.chopping.rest.RestFireManager;
import com.rest.client.R;
import com.rest.client.app.SearchSuggestionProvider;
import com.rest.client.app.adapters.PhotoListAdapter;
import com.rest.client.databinding.PhotosBinding;
import com.rest.client.ds.Photo;
import com.rest.client.ds.PhotoDB;
import io.realm.RealmObject;
import io.realm.RealmQuery;
import io.realm.RealmResults;
import io.realm.Sort;
/**
 * Lists photos fetched from a Firebase backend via {@link RestFireManager},
 * backed by a local Realm cache. Supports pull-to-refresh, paging via a FAB
 * shown at the list bottom, and keyword search over photo descriptions.
 */
public class PhotosActivity extends RestfulActivity {
    /**
     * Shared Firebase REST manager. Static so the connection survives
     * configuration changes; it is bound to the Application context in
     * {@link #onCreate(Bundle)}, so it does not leak this Activity.
     */
    private static RestFireManager sFireMgr;
    /**
     * Data-binding.
     */
    private PhotosBinding mBinding;
    /**
     * Main layout for this component.
     */
    private static final int LAYOUT = R.layout.activity_photos;
    /**
     * Message holder.
     */
    private Snackbar mSnackbar;
    /**
     * Suggestion list while typing.
     */
    protected SearchRecentSuggestions mSuggestions;
    /**
     * Keyword that will be searched.
     */
    private String mKeyword = "";
    /**
     * The search view.
     */
    private SearchView mSearchView;
    /**
     * Search menu.
     */
    private MenuItem mSearchMenu;

    /**
     * Show single instance of {@link PhotosActivity}
     *
     * @param cxt
     *         {@link Activity}.
     */
    public static void showInstance(Activity cxt) {
        Intent intent = new Intent(cxt, PhotosActivity.class);
        intent.setFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP | Intent.FLAG_ACTIVITY_CLEAR_TOP);
        ActivityCompat.startActivity(cxt, intent, null);
    }

    @Override
    protected void loadList() {
        sFireMgr.selectAll(Photo.class);
    }

    @Override
    protected void sendPending() {
        // Nothing queued for upload in this screen.
    }

    @Override
    protected void initDataBinding() {
        mBinding = DataBindingUtil.setContentView(this, LAYOUT);
        setSupportActionBar(mBinding.toolbar);
        mSnackbar = Snackbar.make(mBinding.rootView, "Getting photos ...", Snackbar.LENGTH_INDEFINITE);
        mSnackbar.show();
    }

    @Override
    protected void queryLocalData() {
        mBinding.loadingPb.setVisibility(View.VISIBLE);
        mBinding.responsesRv.setLayoutManager(new LinearLayoutManager(this));
        super.queryLocalData();
    }

    @Override
    protected void buildViews() {
        if (isDataLoaded()) {
            // Lazily create the adapter and hand it the query results once.
            if (mBinding.getAdapter() == null) {
                mBinding.setAdapter(new PhotoListAdapter());
            }
            if (mBinding.getAdapter().getData() == null) {
                mBinding.getAdapter().setData(getData());
            }
            mBinding.getAdapter().notifyDataSetChanged();
            if (mSnackbar != null && mSnackbar.isShown()) {
                mSnackbar.dismiss();
            }
        }
        mBinding.contentSrl.setRefreshing(false);
        mBinding.loadingPb.setVisibility(View.GONE);
    }

    protected Class<? extends RealmObject> getDataClazz() {
        return PhotoDB.class;
    }

    //onNetworkConnected() ignored.

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_photos, menu);
        //Search
        mSearchMenu = menu.findItem(R.id.action_search);
        MenuItemCompat.setOnActionExpandListener(mSearchMenu, new MenuItemCompat.OnActionExpandListener() {
            @Override
            public boolean onMenuItemActionExpand(MenuItem item) {
                return true;
            }

            @Override
            public boolean onMenuItemActionCollapse(MenuItem item) {
                // Collapsing the search clears the filter.
                mKeyword = "";
                doSearch();
                return true;
            }
        });
        mSearchView = (SearchView) MenuItemCompat.getActionView(mSearchMenu);
        mSearchView.setOnQueryTextListener(new OnQueryTextListener() {
            @Override
            public boolean onQueryTextChange(String newText) {
                // An emptied query box also clears the filter.
                if (TextUtils.isEmpty(newText)) {
                    mKeyword = null;
                    doSearch();
                }
                return false;
            }

            @Override
            public boolean onQueryTextSubmit(String s) {
                // Hide the keyboard; the actual search arrives via onNewIntent.
                InputMethodManager mgr = (InputMethodManager) getSystemService(INPUT_METHOD_SERVICE);
                mgr.hideSoftInputFromWindow(mSearchView.getWindowToken(), 0);
                resetSearchView();
                return false;
            }
        });
        mSearchView.setIconifiedByDefault(true);
        SearchManager searchManager = (SearchManager) getSystemService(SEARCH_SERVICE);
        if (searchManager != null) {
            SearchableInfo info = searchManager.getSearchableInfo(getComponentName());
            mSearchView.setSearchableInfo(info);
        }
        return true;
    }

    /**
     * Reset the UI status of the search view.
     */
    protected void resetSearchView() {
        if (mSearchView != null) {
            mSearchView.clearFocus();
        }
    }

    /**
     * Search for a photo: drop the currently shown data and re-query the local
     * cache with {@link #mKeyword} applied via {@link #buildQuery(RealmQuery)}.
     */
    private void doSearch() {
        mBinding.getAdapter().setData(null);
        queryLocalData();
    }

    @Override
    protected void buildQuery(RealmQuery<? extends RealmObject> q) {
        if (!TextUtils.isEmpty(mKeyword)) {
            q.contains("description", mKeyword);
        }
    }

    @Override
    protected RealmResults<? extends RealmObject> createQuery(RealmQuery<? extends RealmObject> q) {
        // Newest photos first.
        RealmResults<? extends RealmObject> results = q.findAllSortedAsync("date", Sort.DESCENDING);
        return results;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int id = item.getItemId();
        switch (id) {
        case R.id.action_vector_image:
            VectorImageActivity.showInstance(this);
            return true;
        case R.id.action_fire_example:
            MainActivity.showInstance(this);
            return true;
        case R.id.action_api_example:
            MainActivity2.showInstance(this);
            return true;
        case R.id.action_photo_calendar:
            PhotoCalendarActivity.showInstance(this);
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    //[Begin for detecting scrolling onto bottom]
    private int mVisibleItemCount;
    private int mPastVisibleItems;
    private int mTotalItemCount;
    //[End]

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        if (sFireMgr == null) {
            String url = null;
            String auth = null;
            String limitLast = null;
            Properties prop = new Properties();
            InputStream input = null;
            try {
                /*From "resources".*/
                input = getApplication().getClassLoader().getResourceAsStream("firebase2.properties");
                if (input != null) {
                    // load a properties file
                    prop.load(input);
                    url = prop.getProperty("firebase_url");
                    auth = prop.getProperty("firebase_auth");
                    limitLast = prop.getProperty("firebase_standard_limit");
                }
            } catch (IOException ex) {
                ex.printStackTrace();
            } finally {
                if (input != null) {
                    try {
                        input.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
            // Fail fast with a clear message instead of the cryptic
            // NumberFormatException Integer.valueOf(null) would throw when the
            // config file is missing or incomplete.
            if (url == null || auth == null || limitLast == null) {
                throw new IllegalStateException(
                        "firebase2.properties is missing or incomplete: firebase_url, "
                                + "firebase_auth and firebase_standard_limit are required");
            }
            sFireMgr = new RestFireManager(url, auth, Integer.valueOf(limitLast));
            // Bind to the Application context so the static manager does not
            // hold a reference to this Activity.
            sFireMgr.onCreate(getApplication());
        }
        super.onCreate(savedInstanceState);
        mBinding.contentSrl.setColorSchemeResources(
                R.color.color_pocket_1,
                R.color.color_pocket_2,
                R.color.color_pocket_3,
                R.color.color_pocket_4
        );
        mBinding.contentSrl.setOnRefreshListener(new OnRefreshListener() {
            @Override
            public void onRefresh() {
                loadList();
            }
        });
        //For search and suggestions.
        mSuggestions = new SearchRecentSuggestions(this, getString(R.string.suggestion_auth),
                SearchSuggestionProvider.MODE);
        mBinding.fab.hide();
        mBinding.fab.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // Guard against a click racing with an emptied data set; the
                // paging anchor below indexes the last element.
                if (getData() == null || getData().isEmpty()) {
                    return;
                }
                // Page: fetch photos older than the last (oldest) one shown.
                PhotoDB photoMin = (PhotoDB) getData().get(getData().size() - 1);
                sFireMgr.selectFrom(new Photo().newFromDB(photoMin));
                mBinding.contentSrl.setRefreshing(true);
                if (mBinding.fab.isShown()) {
                    mBinding.fab.hide();
                }
            }
        });
        mBinding.responsesRv.addOnScrollListener(new RecyclerView.OnScrollListener() {
            @Override
            public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
                //Calc whether the list has been scrolled on bottom,
                //this lets app to getting next page.
                LinearLayoutManager linearLayoutManager = (LinearLayoutManager) recyclerView.getLayoutManager();
                mVisibleItemCount = linearLayoutManager.getChildCount();
                mTotalItemCount = linearLayoutManager.getItemCount();
                mPastVisibleItems = linearLayoutManager.findFirstVisibleItemPosition();
                if (ViewCompat.getY(recyclerView) < dy) {
                    // At the bottom of the list: offer the "load more" FAB.
                    if ((mVisibleItemCount + mPastVisibleItems) == mTotalItemCount) {
                        if (!mBinding.fab.isShown()) {
                            mBinding.fab.show();
                        }
                    }
                } else {
                    if (mBinding.fab.isShown()) {
                        mBinding.fab.hide();
                    }
                }
            }
        });
    }

    @Override
    protected void onNewIntent(Intent intent) {
        super.onNewIntent(intent);
        setIntent(intent);
        mKeyword = intent.getStringExtra(SearchManager.QUERY);
        if (!TextUtils.isEmpty(mKeyword)) {
            mKeyword = mKeyword.trim();
            // Show the active keyword as a white hint in the search box.
            mSearchView.setQueryHint(Html.fromHtml("<font color = #ffffff>" + mKeyword + "</font>"));
            resetSearchView();
            // Remember the query so it appears among future suggestions.
            mSuggestions.saveRecentQuery(mKeyword, null);
            doSearch();
        }
    }
}
| |
/*
*
*/
package net.community.chest.ui.components.datetime;
import java.awt.Color;
import java.awt.Component;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.Set;
import javax.swing.BorderFactory;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import net.community.chest.awt.dom.converter.InsetsValueInstantiator;
import net.community.chest.dom.impl.StandaloneDocumentImpl;
import net.community.chest.swing.event.ChangeListenerSet;
import net.community.chest.ui.helpers.panel.PresetGridLayoutPanel;
import net.community.chest.util.datetime.DateUtil;
import net.community.chest.util.datetime.DaysValues;
import net.community.chest.util.datetime.MonthsValues;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* <P>Provides a panel populated with the days of the month buttons
* along with ability to monitor selection changes</P>
*
* @author Lyor G.
* @since Dec 14, 2008 2:20:11 PM
*/
public class DayOfMonthValuePanel extends PresetGridLayoutPanel implements ActionListener {
/**
*
*/
private static final long serialVersionUID = -7289165807393951464L;
// Selected year (4-digit); 0 means "not set".
private int _year /* =0 */;
public int getYear ()
{
    return _year;
}
public void setYear (int year)
{
    // only assign on change
    if (_year != year)
        _year = year;
}
// Selected month; null means "not set".
private MonthsValues _month /* =null */;
public MonthsValues getMonth ()
{
    return _month;
}
public void setMonth (MonthsValues month)
{
    if (_month != month) // debug breakpoint
        _month = month;
}
// Convenience overload taking a java.util.Calendar month constant
// (Calendar.JANUARY..Calendar.DECEMBER); returns the resolved enum value,
// or null (leaving the current month unchanged) if the value is unknown.
public MonthsValues setMonth (final int mthValue)
{
    final MonthsValues m=MonthsValues.fromCalendarValue(mthValue);
    if (m != null)
        setMonth(m);
    return m;
}
// Selected day-of-month (1-based); 0 means "not set".
private int _day /* =0 */;
public int getDay ()
{
    return _day;
}
public void setDay (int day)
{
    if (_day != day) // debug breakpoint
        _day = day;
}
/**
 * @return true if the currently selected day/month/year form a real
 * calendar date: positive year, month within JANUARY..DECEMBER, and day
 * within the number of days of that month/year combination.
 */
public boolean isValidSelectedDate ()
{
    final MonthsValues mth=getMonth();
    final int day=getDay(), year=getYear();
    final int month=(mth == null) ? (-1) : mth.getCalendarFieldId();
    final int maxDay=DateUtil.getDaysPerMonth(month, year);
    return (day > 0) && (year > 0) && (day <= maxDay)
        && (month >= Calendar.JANUARY) && (month <= Calendar.DECEMBER);
}
/**
 * Populates the given calendar with the currently selected date (time set
 * to midnight).
 *
 * @param cal calendar to update; may be null
 * @return the input parameter if successfully updated (or if it was null),
 * null if the current selection is not a valid date
 */
public Calendar updateSelectedDate (final Calendar cal)
{
    if (null == cal)
        return cal;
    // Re-use the single validity definition instead of duplicating the
    // day/month/year range checks here.
    if (!isValidSelectedDate())
        return null;
    final MonthsValues m=getMonth();    // non-null: selection is valid
    cal.clear();
    cal.set(getYear(), m.getCalendarFieldId(), getDay(), 0, 0, 0);
    cal.getTimeInMillis();  // force field re-calculation
    return cal;
}
/**
 * @return a fresh {@link Calendar} holding the selected date, or null if
 * the current selection is not a valid date
 */
public Calendar getSelectedDate ()
{
    if (!isValidSelectedDate())
        return null;
    return updateSelectedDate(Calendar.getInstance());
}
public void setSelectedDate (final int startDay, final int startMonth, final int startYear)
{
setDay(startDay);
setMonth(startMonth);
setYear(startYear);
}
public void setSelectedDate (final Calendar c)
{
final Calendar calStart=getSelectionValue(c);
final int startDay=calStart.get(Calendar.DAY_OF_MONTH),
startMonth=calStart.get(Calendar.MONTH),
startYear=calStart.get(Calendar.YEAR);
setSelectedDate(startDay, startMonth, startYear);
}
protected static Calendar getSelectionValue (Calendar c)
{
if (null == c)
return Calendar.getInstance();
else
return c;
}
// defaults for various options
public static final Color DEFAULT_SELECTED_DAY_FOREGROUND=Color.BLUE,
DEFAULT_SELECTED_DAY_BACKROUND=Color.WHITE,
DEFAULT_NORMAL_DAY_FOREGROUND=Color.BLACK,
DEFAULT_NORMAL_DAY_BACKGROUND=Color.WHITE;
private static final Color resolveColor (final Color curColor, final Color defColor)
{
return (null == curColor) ? defColor : curColor;
}
private Color _selDayFg;
public Color getSelectedDayForeground ()
{
return resolveColor(_selDayFg, DEFAULT_SELECTED_DAY_FOREGROUND);
}
public void setSelectedDayForeground (final Color c)
{
if (c != null)
_selDayFg = c;
}
private Color _selDayBg;
public Color getSelectedDayBackground ()
{
return resolveColor(_selDayBg, DEFAULT_SELECTED_DAY_BACKROUND);
}
public void setSelectedDayBackground (final Color c)
{
if (c != null)
_selDayBg = c;
}
private Color _nrmDayFg;
public Color getNormalDayForeground ()
{
return resolveColor(_nrmDayFg, DEFAULT_NORMAL_DAY_FOREGROUND);
}
public void setNormalDayForeground (final Color c)
{
if (c != null)
_nrmDayFg = c;
}
private Color _nrmDayBg;
public Color getNormalDayBackground ()
{
return resolveColor(_nrmDayBg, DEFAULT_NORMAL_DAY_BACKGROUND);
}
public void setNormalDayBackground (final Color c)
{
if (c != null)
_nrmDayBg = c;
}
protected static class DayButton extends JButton {
/**
*
*/
private static final long serialVersionUID = -560847711241126527L;
private final int _dayIndex;
public final int getDayIndex ()
{
return _dayIndex;
}
public DayButton (int dayIndex, boolean selected)
{
if (((_dayIndex=dayIndex) <= 0) || (dayIndex > DateUtil.MAX_DAYS_PER_MONTH))
throw new IllegalArgumentException("Bad day index: " + dayIndex);
setText(String.valueOf(dayIndex));
setSelected(selected);
}
public DayButton (int dayIndex)
{
this(dayIndex, false);
}
}
private ChangeEvent _selEvent /* =null */;
protected synchronized ChangeEvent getSelectionChangeEvent ()
{
if (null == _selEvent)
_selEvent = new ChangeEvent(this);
return _selEvent;
}
private Set<ChangeListener> _cl /* =null */;
// returns Collection of informed listeners
protected List<ChangeListener> fireSelectionChangedListeners ()
{
final List<ChangeListener> ll;
// we use a copy to avoid concurrent modifications
synchronized(this)
{
if ((null == _cl) || (_cl.size() <= 0))
return null;
ll = new ArrayList<ChangeListener>(_cl);
}
final ChangeEvent ce=getSelectionChangeEvent();
for (final ChangeListener l : ll)
{
if (l != null) // should not be otherwise
l.stateChanged(ce);
}
return ll;
}
/**
* Can be used to register for whenever the user makes a selection
* @param l The {@link ChangeListener} instance to be fired when selected
* day changes - ignored if <code>null</code> or already registered. The
* provided {@link ChangeEvent} contains this panel as the source object
* @return <code>true</code> if listener successfully registered
*/
public synchronized boolean addChangeListener (final ChangeListener l)
{
if (null == l)
return false;
else if (null == _cl)
_cl = new ChangeListenerSet();
else if (_cl.contains(l))
return false;
return _cl.add(l);
}
/**
* Can be used to un-register a selection listener
* @param l The {@link ChangeListener} instance to be un-registered
* ignored if <code>null</code> or not registered
* @return <code>true</code> if listener successfully un-registered
*/
public synchronized boolean removeChangeListener (final ChangeListener l)
{
if ((null == l) || (null == _cl))
return false;
return _cl.remove(l);
}
protected boolean isPlaceholderComponent (final Object o)
{
return (!(o instanceof DayButton));
}
protected void updateDeselectedDayComponent (final Component c)
{
if (c instanceof JButton)
{
final JButton b=(JButton) c;
b.setSelected(false);
b.setBackground(getNormalDayBackground());
b.setForeground(getNormalDayForeground());
}
}
protected void updateSelectedDayComponent (final Component c)
{
if (c instanceof JButton)
{
final JButton b=(JButton) c;
b.setSelected(true);
b.setBackground(getSelectedDayBackground());
b.setForeground(getSelectedDayForeground());
}
}
/*
* @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
*/
@Override
public void actionPerformed (ActionEvent event)
{
final Object src=(null == event) ? null : event.getSource();
if (isPlaceholderComponent(src))
return; // debug breakpoint
final DayButton dbtn=(DayButton) src;
if (dbtn.isSelected())
return; // nothing to do if already selected
final Component[] ca=getComponents();
if ((ca != null) && (ca.length > 0))
{
for (final Component c : ca)
{
if (isPlaceholderComponent(c))
continue;
// we could have stopped at first selected button...
final JButton b=(JButton) c;
if ((null == b) || (!b.isSelected()))
continue;
updateDeselectedDayComponent(c);
}
}
updateSelectedDayComponent(dbtn);
setDay(dbtn.getDayIndex());
fireSelectionChangedListeners();
}
protected JComponent getDayValuePlaceHolder (final DaysValues dv)
{
if (null == dv)
return null;
final JButton dbtn=new JButton("");
dbtn.setFocusPainted(false);
dbtn.setMargin(InsetsValueInstantiator.NO_INSETS);
dbtn.setBorder(BorderFactory.createEmptyBorder());
dbtn.setOpaque(true);
dbtn.setVisible(false);
return dbtn;
}
protected JComponent getDayValueComponent (
final DaysValues dv, final int dIndex, final boolean selected)
{
if ((null == dv) || (dIndex <= 0) || (dIndex > DateUtil.MAX_DAYS_PER_MONTH))
return null;
final DayButton dbtn=new DayButton(dIndex);
dbtn.setFocusPainted(false);
dbtn.setMargin(InsetsValueInstantiator.NO_INSETS);
dbtn.setBorder(BorderFactory.createEmptyBorder());
dbtn.setOpaque(true);
dbtn.setVisible(true);
dbtn.setSelected(selected);
dbtn.setBackground(selected ? getSelectedDayBackground() : getNormalDayBackground());
dbtn.setForeground(selected ? getSelectedDayForeground() : getNormalDayForeground());
dbtn.addActionListener(this);
return dbtn;
}
// NOTE !!! does not change the selected value
public void setComponentValue (final int startDay, final MonthsValues m, final int startYear)
{
final int numComps=getComponentCount();
if (numComps > 0) // debug breakpoint
removeAll();
final int startMonth=(null == m) ? (-1) : m.getCalendarFieldId(),
startDOW=DateUtil.getDayOfWeekForDate(1, startMonth, startYear),
numDays=DateUtil.getDaysPerMonth(startMonth, startYear);
if ((startMonth < Calendar.JANUARY)
|| (startMonth > Calendar.DECEMBER)
|| (numDays <= 0)
|| (startDOW < Calendar.SUNDAY)
|| (startDOW > Calendar.SATURDAY))
return; // debug breakpoint
final DaysValues startDayValue=DaysValues.fromCalendarValue(startDOW);
for (int dIndex=1; dIndex <= numDays; )
{
for (int dwi=0; (dwi < DateUtil.DAYS_PER_WEEK) && (dIndex <= numDays); dwi++)
{
final DaysValues dv=DaysValues.VALUES.get(dwi);
boolean useDayValue=true;
// fill with spaces till day-of-week of 1st day of month reached
if (1 == dIndex)
{
if (!startDayValue.equals(dv))
useDayValue = false;
}
// fill with spaces till end of last week
else if (dIndex > numDays)
useDayValue = false;
final JComponent c=useDayValue
? getDayValueComponent(dv, dIndex, (startDay == dIndex))
: getDayValuePlaceHolder(dv)
;
if (c != null)
add(c);
if (useDayValue)
dIndex++;
}
}
setSelectedDate(startDay, startMonth, startYear);
updateUI();
fireSelectionChangedListeners();
}
public void setComponentValue (final int startDay, final int startMonth, final int startYear)
{
setComponentValue(startDay, MonthsValues.fromCalendarValue(startMonth), startYear);
}
public void setComponentDayValue (final int startDay)
{
setComponentValue(startDay, getMonth(), getYear());
}
public void setComponentMonthValue (final MonthsValues m)
{
setComponentValue(getDay(), m, getYear());
}
public void setComponentMonthValue (final int startMonth)
{
setComponentMonthValue(MonthsValues.fromCalendarValue(startMonth));
}
public void setComponentYearValue (final int startYear)
{
setComponentValue(getDay(), getMonth(), startYear);
}
// NOTE !!! updates the selected value(s)
public Calendar setComponentValue (final Calendar c)
{
final Calendar calStart=getSelectionValue(c);
final int startDay=calStart.get(Calendar.DAY_OF_MONTH),
startMonth=calStart.get(Calendar.MONTH),
startYear=calStart.get(Calendar.YEAR);
setComponentValue(startDay, startMonth, startYear);
return c;
}
/*
* @see net.community.chest.ui.helpers.panel.HelperPanel#layoutComponent(org.w3c.dom.Element)
*/
@Override
public void layoutComponent (Element elem) throws RuntimeException
{
super.layoutComponent(elem);
final Calendar curDate=getSelectedDate();
setComponentValue(curDate);
}
public DayOfMonthValuePanel (Document doc, boolean autoLayout)
{
super(0, DateUtil.DAYS_PER_WEEK, 5, 5, doc, autoLayout);
}
public DayOfMonthValuePanel (Document doc)
{
this(doc, true);
}
public DayOfMonthValuePanel (Element elem, boolean autoLayout)
{
this((null == elem) ? null : new StandaloneDocumentImpl(elem), autoLayout);
}
public DayOfMonthValuePanel (Element elem)
{
this(elem, true);
}
public DayOfMonthValuePanel (boolean autoLayout)
{
this((Document) null, autoLayout);
}
public DayOfMonthValuePanel ()
{
this(true);
}
}
| |
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.connector.mysql;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.StandardCharsets;
import java.sql.Types;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.temporal.ChronoField;
import java.time.temporal.ChronoUnit;
import java.time.temporal.Temporal;
import java.util.Arrays;
import java.util.List;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.errors.ConnectException;
import org.apache.kafka.connect.source.SourceRecord;
import com.github.shyiko.mysql.binlog.event.deserialization.AbstractRowsEventDataDeserializer;
import com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary;
import com.mysql.jdbc.CharsetMapping;
import io.debezium.annotation.Immutable;
import io.debezium.data.Json;
import io.debezium.jdbc.JdbcValueConverters;
import io.debezium.relational.Column;
import io.debezium.relational.ValueConverter;
import io.debezium.time.Year;
import io.debezium.util.Strings;
import mil.nga.wkb.geom.Point;
import mil.nga.wkb.util.WkbException;
/**
* MySQL-specific customization of the conversions from JDBC values obtained from the MySQL binlog client library.
* <p>
* This class always uses UTC for the default time zone when converting values without timezone information to values that require
* timezones. This is because MySQL {@code TIMESTAMP} values are always
* <a href="https://dev.mysql.com/doc/refman/5.7/en/datetime.html">stored in UTC</a> (unlike {@code DATETIME} values) and
* are replicated in this form. Meanwhile, the MySQL Binlog Client library will {@link AbstractRowsEventDataDeserializer
* deserialize} these as {@link java.sql.Timestamp} values that have no timezone and, therefore, are presumed to be in UTC.
* When the column is properly marked with a {@link Types#TIMESTAMP_WITH_TIMEZONE} type, the converters will need to convert
* that {@link java.sql.Timestamp} value into an {@link OffsetDateTime} using the default time zone, which always is UTC.
*
* @author Randall Hauch
* @see com.github.shyiko.mysql.binlog.event.deserialization.AbstractRowsEventDataDeserializer
*/
@Immutable
public class MySqlValueConverters extends JdbcValueConverters {
    /**
     * A utility method that adjusts <a href="https://dev.mysql.com/doc/refman/5.7/en/two-digit-years.html">ambiguous</a> 2-digit
     * year values of DATETIME, DATE, and TIMESTAMP types using these MySQL-specific rules:
     * <ul>
     * <li>Year values in the range 00-69 are converted to 2000-2069.</li>
     * <li>Year values in the range 70-99 are converted to 1970-1999.</li>
     * </ul>
     *
     * @param temporal the temporal instance to adjust; may not be null
     * @return the possibly adjusted temporal instance; never null
     */
    protected static Temporal adjustTemporal(Temporal temporal) {
        if (temporal.isSupported(ChronoField.YEAR)) {
            int year = temporal.get(ChronoField.YEAR);
            if (0 <= year && year <= 69) {
                temporal = temporal.plus(2000, ChronoUnit.YEARS);
            } else if (70 <= year && year <= 99) {
                temporal = temporal.plus(1900, ChronoUnit.YEARS);
            }
        }
        return temporal;
    }
    /**
     * A utility method that adjusts <a href="https://dev.mysql.com/doc/refman/5.7/en/two-digit-years.html">ambiguous</a> 2-digit
     * year values of YEAR type using these MySQL-specific rules:
     * <ul>
     * <li>Year values in the range 01-69 are converted to 2001-2069.</li>
     * <li>Year values in the range 70-99 are converted to 1970-1999.</li>
     * </ul>
     * MySQL treats YEAR(4) the same, except that a numeric 00 inserted into YEAR(4) results in 0000 rather than 2000; to
     * specify zero for YEAR(4) and have it be interpreted as 2000, specify it as a string '0' or '00'. This should be handled
     * by MySQL before Debezium sees the value. Values of 100 or more (i.e., already 4-digit years) are returned unchanged.
     *
     * @param year the year value to adjust; may not be null
     * @return the possibly adjusted year number; never null
     */
    protected static int adjustYear(int year) {
        if (0 < year && year <= 69) {
            year += 2000;
        } else if (70 <= year && year <= 99) {
            year += 1900;
        }
        return year;
    }
    /**
     * Create a new instance that always uses UTC for the default time zone when converting values without timezone information
     * to values that require timezones.
     * <p>
     *
     * @param decimalMode how {@code DECIMAL} and {@code NUMERIC} values should be treated; may be null if
     *            {@link io.debezium.jdbc.JdbcValueConverters.DecimalMode#PRECISE} is to be used
     * @param adaptiveTimePrecision {@code true} if the time, date, and timestamp values should be based upon the precision of the
     *            database columns using {@link io.debezium.time} semantic types, or {@code false} if they should be fixed to
     *            millisecond precision using Kafka Connect {@link org.apache.kafka.connect.data} logical types.
     */
    public MySqlValueConverters(DecimalMode decimalMode, boolean adaptiveTimePrecision) {
        this(decimalMode, adaptiveTimePrecision, ZoneOffset.UTC);
    }
    /**
     * Create a new instance, and specify the time zone offset that should be used only when converting values without timezone
     * information to values that require timezones. This default offset should not be needed when values are highly-correlated
     * with the expected SQL/JDBC types.
     *
     * @param decimalMode how {@code DECIMAL} and {@code NUMERIC} values should be treated; may be null if
     *            {@link io.debezium.jdbc.JdbcValueConverters.DecimalMode#PRECISE} is to be used
     * @param adaptiveTimePrecision {@code true} if the time, date, and timestamp values should be based upon the precision of the
     *            database columns using {@link io.debezium.time} semantic types, or {@code false} if they should be fixed to
     *            millisecond precision using Kafka Connect {@link org.apache.kafka.connect.data} logical types.
     * @param defaultOffset the zone offset that is to be used when converting non-timezone related values to values that do
     *            have timezones; may be null if UTC is to be used
     */
    public MySqlValueConverters(DecimalMode decimalMode, boolean adaptiveTimePrecision, ZoneOffset defaultOffset) {
        super(decimalMode, adaptiveTimePrecision, defaultOffset, MySqlValueConverters::adjustTemporal);
    }
    /**
     * {@inheritDoc}
     * <p>
     * MySQL replicates {@code BIT} values with the most significant byte first.
     */
    @Override
    protected ByteOrder byteOrderOfBitType() {
        return ByteOrder.BIG_ENDIAN;
    }
    @Override
    public SchemaBuilder schemaBuilder(Column column) {
        // Handle a few MySQL-specific types based upon how they are handled by the MySQL binlog client ...
        String typeName = column.typeName().toUpperCase();
        if (matches(typeName, "JSON")) {
            return Json.builder();
        }
        if (matches(typeName, "POINT")) {
            return io.debezium.data.geometry.Point.builder();
        }
        if (matches(typeName, "YEAR")) {
            return Year.builder();
        }
        if (matches(typeName, "ENUM")) {
            String commaSeparatedOptions = extractEnumAndSetOptionsAsString(column);
            return io.debezium.data.Enum.builder(commaSeparatedOptions);
        }
        if (matches(typeName, "SET")) {
            String commaSeparatedOptions = extractEnumAndSetOptionsAsString(column);
            return io.debezium.data.EnumSet.builder(commaSeparatedOptions);
        }
        // Otherwise, let the base class handle it ...
        return super.schemaBuilder(column);
    }
    @Override
    public ValueConverter converter(Column column, Field fieldDefn) {
        // Handle a few MySQL-specific types based upon how they are handled by the MySQL binlog client ...
        String typeName = column.typeName().toUpperCase();
        if (matches(typeName, "JSON")) {
            return (data) -> convertJson(column, fieldDefn, data);
        }
        if (matches(typeName, "POINT")) {
            return (data) -> convertPoint(column, fieldDefn, data);
        }
        if (matches(typeName, "YEAR")) {
            return (data) -> convertYearToInt(column, fieldDefn, data);
        }
        if (matches(typeName, "ENUM")) {
            // Build up the character array based upon the column's type ...
            List<String> options = extractEnumAndSetOptions(column);
            return (data) -> convertEnumToString(options, column, fieldDefn, data);
        }
        if (matches(typeName, "SET")) {
            // Build up the character array based upon the column's type ...
            List<String> options = extractEnumAndSetOptions(column);
            return (data) -> convertSetToString(options, column, fieldDefn, data);
        }
        // We have to convert bytes encoded in the column's character set ...
        switch (column.jdbcType()) {
            case Types.CHAR: // variable-length
            case Types.VARCHAR: // variable-length
            case Types.LONGVARCHAR: // variable-length
            case Types.CLOB: // variable-length
            case Types.NCHAR: // fixed-length
            case Types.NVARCHAR: // fixed-length
            case Types.LONGNVARCHAR: // fixed-length
            case Types.NCLOB: // fixed-length
            case Types.DATALINK:
            case Types.SQLXML:
                Charset charset = charsetFor(column);
                if (charset != null) {
                    logger.debug("Using {} charset by default for column: {}", charset, column);
                    return (data) -> convertString(column, fieldDefn, charset, data);
                }
                logger.warn("Using UTF-8 charset by default for column without charset: {}", column);
                return (data) -> convertString(column, fieldDefn, StandardCharsets.UTF_8, data);
            default:
                break;
        }
        // Otherwise, let the base class handle it ...
        return super.converter(column, fieldDefn);
    }
    /**
     * Return the {@link Charset} instance with the MySQL-specific character set name used by the given column.
     *
     * @param column the column in which the character set is used; never null
     * @return the Java {@link Charset}, or null if there is no mapping
     */
    protected Charset charsetFor(Column column) {
        String mySqlCharsetName = column.charsetName();
        if (mySqlCharsetName == null) {
            logger.warn("Column is missing a character set: {}", column);
            return null;
        }
        String encoding = CharsetMapping.getJavaEncodingForMysqlCharset(mySqlCharsetName);
        if (encoding == null) {
            logger.warn("Column uses MySQL character set '{}', which has no mapping to a Java character set", mySqlCharsetName);
        } else {
            try {
                return Charset.forName(encoding);
            } catch (IllegalCharsetNameException e) {
                logger.error("Unable to load Java charset '{}' for column with MySQL character set '{}'", encoding, mySqlCharsetName);
            }
        }
        return null;
    }
    /**
     * Convert the {@link String} or {@code byte[]} JSON value to a JSON string value used in a {@link SourceRecord}.
     *
     * @param column the column in which the value appears
     * @param fieldDefn the field definition for the {@link SourceRecord}'s {@link Schema}; never null
     * @param data the data; may be null
     * @return the converted value, or null if the conversion could not be made and the column allows nulls
     * @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
     */
    protected Object convertJson(Column column, Field fieldDefn, Object data) {
        if (data == null) {
            data = fieldDefn.schema().defaultValue();
        }
        if (data == null) {
            if (column.isOptional()) return null;
            return "{}"; // non-optional column with no value: empty JSON document
        }
        if (data instanceof byte[]) {
            // The BinlogReader sees these JSON values as binary encoded, so we use the binlog client library's utility
            // to parse MySQL's internal binary representation into a JSON string, using the standard formatter.
            try {
                String json = JsonBinary.parseAsString((byte[]) data);
                return json;
            } catch (IOException e) {
                throw new ConnectException("Failed to parse and read a JSON value on " + column + ": " + e.getMessage(), e);
            }
        }
        if (data instanceof String) {
            // The SnapshotReader sees JSON values as UTF-8 encoded strings.
            return data;
        }
        return handleUnknownData(column, fieldDefn, data);
    }
    /**
     * Convert the {@link String} or {@code byte[]} value to a string value used in a {@link SourceRecord}.
     *
     * @param column the column in which the value appears
     * @param fieldDefn the field definition for the {@link SourceRecord}'s {@link Schema}; never null
     * @param columnCharset the Java character set in which column byte[] values are encoded; may not be null
     * @param data the data; may be null
     * @return the converted value, or null if the conversion could not be made and the column allows nulls
     * @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
     */
    protected Object convertString(Column column, Field fieldDefn, Charset columnCharset, Object data) {
        if (data == null) {
            data = fieldDefn.schema().defaultValue();
        }
        if (data == null) {
            if (column.isOptional()) return null;
            return "";
        }
        if (data instanceof byte[]) {
            // Decode the binary representation using the given character encoding ...
            return new String((byte[]) data, columnCharset);
        }
        if (data instanceof String) {
            return data;
        }
        return handleUnknownData(column, fieldDefn, data);
    }
    /**
     * Converts a value object for a MySQL {@code YEAR}, which appear in the binlog as an integer though returns from
     * the MySQL JDBC driver as either a short or a {@link java.sql.Date}.
     *
     * @param column the column definition describing the {@code data} value; never null
     * @param fieldDefn the field definition; never null
     * @param data the data object to be converted into a year literal integer value; never null
     * @return the converted value, or null if the conversion could not be made and the column allows nulls
     * @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
     */
    @SuppressWarnings("deprecation")
    protected Object convertYearToInt(Column column, Field fieldDefn, Object data) {
        if (data == null) {
            data = fieldDefn.schema().defaultValue();
        }
        if (data == null) {
            if (column.isOptional()) return null;
            return 0;
        }
        if (data instanceof java.time.Year) {
            // The MySQL binlog always returns a Year object ...
            return adjustYear(((java.time.Year) data).getValue());
        }
        if (data instanceof java.sql.Date) {
            // MySQL JDBC driver sometimes returns a Java SQL Date object;
            // Date#getYear() is documented to return (year - 1900), so add 1900 back to recover the
            // full 4-digit year. Without this, a YEAR such as 2017 would be reported as 117.
            // adjustYear(..) is a no-op on 4-digit values but kept for symmetry with the other branches.
            return adjustYear(((java.sql.Date) data).getYear() + 1900);
        }
        if (data instanceof Number) {
            // MySQL JDBC driver sometimes returns a short ...
            return adjustYear(((Number) data).intValue());
        }
        return handleUnknownData(column, fieldDefn, data);
    }
    /**
     * Converts a value object for a MySQL {@code ENUM}, which is represented in the binlog events as an integer value containing
     * the index of the enum option. The MySQL JDBC driver returns a string containing the option,
     * so this method calculates the same.
     *
     * @param options the characters that appear in the same order as defined in the column; may not be null
     * @param column the column definition describing the {@code data} value; never null
     * @param fieldDefn the field definition; never null
     * @param data the data object to be converted into an {@code ENUM} literal String value
     * @return the converted value, or null if the conversion could not be made and the column allows nulls
     * @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
     */
    protected Object convertEnumToString(List<String> options, Column column, Field fieldDefn, Object data) {
        if (data == null) {
            data = fieldDefn.schema().defaultValue();
        }
        if (data == null) {
            if (column.isOptional()) return null;
            return "";
        }
        if (data instanceof String) {
            // JDBC should return strings ...
            return data;
        }
        if (data instanceof Integer) {
            if (options != null) {
                // The binlog will contain an int with the 1-based index of the option in the enum value ...
                int value = ((Integer) data).intValue();
                if (value == 0) {
                    // an invalid value was specified, which corresponds to the empty string '' and an index of 0
                    return "";
                }
                int index = value - 1; // 'options' is 0-based
                if (index < options.size() && index >= 0) {
                    return options.get(index);
                }
            }
            return null;
        }
        return handleUnknownData(column, fieldDefn, data);
    }
    /**
     * Converts a value object for a MySQL {@code SET}, which is represented in the binlog events contain a long number in which
     * every bit corresponds to a different option. The MySQL JDBC driver returns a string containing the comma-separated options,
     * so this method calculates the same.
     *
     * @param options the characters that appear in the same order as defined in the column; may not be null
     * @param column the column definition describing the {@code data} value; never null
     * @param fieldDefn the field definition; never null
     * @param data the data object to be converted into an {@code SET} literal String value; never null
     * @return the converted value, or null if the conversion could not be made and the column allows nulls
     * @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
     */
    protected Object convertSetToString(List<String> options, Column column, Field fieldDefn, Object data) {
        if (data == null) {
            data = fieldDefn.schema().defaultValue();
        }
        if (data == null) {
            if (column.isOptional()) return null;
            return "";
        }
        if (data instanceof String) {
            // JDBC should return strings ...
            return data;
        }
        if (data instanceof Long) {
            // The binlog will contain a long with the indexes of the options in the set value ...
            long indexes = ((Long) data).longValue();
            return convertSetValue(column, indexes, options);
        }
        return handleUnknownData(column, fieldDefn, data);
    }
    /**
     * Determine if the uppercase form of a column's type exactly matches or begins with the specified prefix.
     * Note that this logic works when the column's {@link Column#typeName() type} contains the type name followed by parentheses.
     *
     * @param upperCaseTypeName the upper case form of the column's {@link Column#typeName() type name}
     * @param upperCaseMatch the upper case form of the expected type or prefix of the type; may not be null
     * @return {@code true} if the type matches the specified type, or {@code false} otherwise
     */
    protected boolean matches(String upperCaseTypeName, String upperCaseMatch) {
        if (upperCaseTypeName == null) return false;
        return upperCaseMatch.equals(upperCaseTypeName) || upperCaseTypeName.startsWith(upperCaseMatch + "(");
    }
    /**
     * Extract the literal options declared in the column's {@code ENUM(...)} or {@code SET(...)} type expression.
     *
     * @param column the column definition; never null
     * @return the options in declaration order
     */
    protected List<String> extractEnumAndSetOptions(Column column) {
        return MySqlDdlParser.parseSetAndEnumOptions(column.typeExpression());
    }
    /**
     * Same as {@link #extractEnumAndSetOptions(Column)} but joined into a single comma-separated string.
     *
     * @param column the column definition; never null
     * @return the comma-separated options
     */
    protected String extractEnumAndSetOptionsAsString(Column column) {
        return Strings.join(",", extractEnumAndSetOptions(column));
    }
    /**
     * Render a MySQL {@code SET} bitmask as a comma-separated string of the options whose bits are set.
     * Bit {@code i} (least-significant first) corresponds to {@code options.get(i)}; out-of-range bits
     * are logged and skipped.
     *
     * @param column the column definition, used only for logging; never null
     * @param indexes the bitmask of selected options
     * @param options the options in declaration order; may not be null
     * @return the comma-separated options; never null
     */
    protected String convertSetValue(Column column, long indexes, List<String> options) {
        StringBuilder sb = new StringBuilder();
        int index = 0;
        boolean first = true;
        int optionLen = options.size();
        while (indexes != 0L) {
            if (indexes % 2L != 0) {
                if (first) {
                    first = false;
                } else {
                    sb.append(',');
                }
                if (index < optionLen) {
                    sb.append(options.get(index));
                } else {
                    logger.warn("Found unexpected index '{}' on column {}", index, column);
                }
            }
            ++index;
            indexes = indexes >>> 1;
        }
        return sb.toString();
    }
    /**
     * Convert a value representing a POINT {@code byte[]} value to a Point value used in a {@link SourceRecord}.
     *
     * @param column the column in which the value appears
     * @param fieldDefn the field definition for the {@link SourceRecord}'s {@link Schema}; never null
     * @param data the data; may be null
     * @return the converted value, or null if the conversion could not be made and the column allows nulls
     * @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
     */
    protected Object convertPoint(Column column, Field fieldDefn, Object data) {
        if (data == null) {
            data = fieldDefn.schema().defaultValue();
        }
        Schema schema = fieldDefn.schema();
        if (data instanceof byte[]) {
            // The binlog utility sends a byte array for any Geometry type, we will use our own binaryParse to parse the byte to WKB, hence
            // to the suitable class
            try {
                MySqlGeometry mySqlGeometry = MySqlGeometry.fromBytes((byte[]) data);
                Point point = mySqlGeometry.getPoint();
                return io.debezium.data.geometry.Point.createValue(schema, point.getX(), point.getY(), mySqlGeometry.getWkb());
            } catch (WkbException e) {
                throw new ConnectException("Failed to parse and read a value of type POINT on " + column + ": " + e.getMessage(), e);
            }
        }
        return handleUnknownData(column, fieldDefn, data);
    }
    @Override
    protected ByteBuffer convertByteArray(Column column, byte[] data) {
        // DBZ-254 right-pad fixed-length binary column values with 0x00 (zero byte)
        if (column.jdbcType() == Types.BINARY && data.length < column.length()) {
            data = Arrays.copyOf(data, column.length());
        }
        return super.convertByteArray(column, data);
    }
}
| |
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.map.impl.recordstore;
import com.hazelcast.config.InMemoryFormat;
import com.hazelcast.core.EntryView;
import com.hazelcast.internal.nearcache.impl.invalidation.InvalidationQueue;
import com.hazelcast.map.impl.MapContainer;
import com.hazelcast.map.impl.MapEntries;
import com.hazelcast.map.impl.iterator.MapEntriesWithCursor;
import com.hazelcast.map.impl.iterator.MapKeysWithCursor;
import com.hazelcast.map.impl.mapstore.MapDataStore;
import com.hazelcast.map.impl.record.Record;
import com.hazelcast.map.impl.record.RecordFactory;
import com.hazelcast.map.merge.MapMergePolicy;
import com.hazelcast.monitor.LocalRecordStoreStats;
import com.hazelcast.nio.serialization.Data;
import com.hazelcast.spi.exception.RetryableHazelcastException;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
/**
* Defines a record-store.
*/
public interface RecordStore<R extends Record> extends LocalRecordStoreStats {
    /**
     * Default TTL value of a record.
     */
    long DEFAULT_TTL = -1L;
    /** Returns the name of the map this record-store belongs to. */
    String getName();
    /**
     * Puts a key-value pair into this record-store.
     *
     * @param ttl time-to-live in milliseconds ({@link #DEFAULT_TTL} presumably means "no explicit TTL" — confirm)
     * @return the previous value mapped to {@code dataKey}, or {@code null} if there was none
     */
    Object put(Data dataKey, Object dataValue, long ttl);
    /**
     * Puts the key-value pair only if the key is not already mapped.
     *
     * @return the existing value if the key was already present, otherwise {@code null}
     */
    Object putIfAbsent(Data dataKey, Object value, long ttl);
    /** Puts a key-value pair on a backup partition. */
    R putBackup(Data key, Object value);
    /**
     * @param key          the key to be processed.
     * @param value        the value to be processed.
     * @param ttl          milliseconds. Check out {@link com.hazelcast.map.impl.proxy.MapProxySupport#putInternal}
     * @param putTransient {@code true} if putting transient entry, otherwise {@code false}
     * @return previous record if exists otherwise null.
     */
    R putBackup(Data key, Object value, long ttl, boolean putTransient);
    /**
     * Returns {@code true} if key doesn't exist previously, otherwise returns {@code false}.
     *
     * @see com.hazelcast.core.IMap#set(Object, Object)
     */
    boolean set(Data dataKey, Object value, long ttl);
    /**
     * Removes the mapping for a key.
     *
     * @return the previous value mapped to {@code dataKey}, or {@code null} if there was none
     */
    Object remove(Data dataKey);
    /** Removes the mapping for a key; returns whether a mapping was removed. */
    boolean delete(Data dataKey);
    /** Removes the entry only if it is currently mapped to {@code testValue}. */
    boolean remove(Data dataKey, Object testValue);
    /**
     * Similar to {@link RecordStore#remove(com.hazelcast.nio.serialization.Data)}
     * except removeBackup doesn't touch mapstore since it does not return previous value.
     */
    void removeBackup(Data dataKey);
    /**
     * Gets record from {@link RecordStore}.
     * Loads missing keys from map store.
     *
     * @param dataKey key.
     * @param backup  <code>true</code> if a backup partition, otherwise <code>false</code>.
     * @return value of an entry in {@link RecordStore}
     */
    Object get(Data dataKey, boolean backup);
    /**
     * Called when {@link com.hazelcast.config.MapConfig#isReadBackupData} is <code>true</code> from
     * {@link com.hazelcast.map.impl.proxy.MapProxySupport#getInternal}
     * <p/>
     * Returns corresponding value for key as {@link com.hazelcast.nio.serialization.Data}.
     * This adds an extra serialization step. For the reason of this behaviour please see issue 1292 on github.
     *
     * @param key key to be accessed
     * @return value as {@link com.hazelcast.nio.serialization.Data}
     * independent of {@link com.hazelcast.config.InMemoryFormat}
     */
    Data readBackupData(Data key);
    /** Returns the entries for all of the given keys. */
    MapEntries getAll(Set<Data> keySet);
    /**
     * Checks if the key exist in memory without trying to load data from map-loader
     */
    boolean existInMemory(Data key);
    /** Returns whether this record-store contains a mapping for the given key. */
    boolean containsKey(Data dataKey);
    /** Returns the number of currently locked entries. */
    int getLockedEntryCount();
    /**
     * Replaces the value for a key only if the key is currently mapped.
     *
     * @return the previous value, or {@code null} if the key was absent
     */
    Object replace(Data dataKey, Object update);
    /**
     * Sets the value to the given updated value
     * if {@link com.hazelcast.map.impl.record.RecordFactory#isEquals} comparison
     * of current value and expected value is {@code true}.
     *
     * @param dataKey key which's value is requested to be replaced.
     * @param expect  the expected value
     * @param update  the new value
     * @return {@code true} if successful. False return indicates that
     * the actual value was not equal to the expected value.
     */
    boolean replace(Data dataKey, Object expect, Object update);
    /** Puts a transient entry (not persisted to the map store — inferred from the name; confirm). */
    Object putTransient(Data dataKey, Object value, long ttl);
    /**
     * Puts key-value pair to map which is the result of a load from map store operation.
     *
     * @param key   key to put.
     * @param value to put.
     * @return the previous value associated with <tt>key</tt>, or
     * <tt>null</tt> if there was no mapping for <tt>key</tt>.
     * @see com.hazelcast.map.impl.operation.PutFromLoadAllOperation
     */
    Object putFromLoad(Data key, Object value);
    /**
     * Puts key-value pair to map which is the result of a load from map store operation on backup.
     *
     * @param key   key to put.
     * @param value to put.
     * @return the previous value associated with <tt>key</tt>, or
     * <tt>null</tt> if there was no mapping for <tt>key</tt>.
     * @see com.hazelcast.map.impl.operation.PutFromLoadAllBackupOperation
     */
    Object putFromLoadBackup(Data key, Object value);
    /**
     * Puts key-value pair to map which is the result of a load from map store operation.
     *
     * @param key   key to put.
     * @param value to put.
     * @param ttl   time to live seconds.
     * @return the previous value associated with <tt>key</tt>, or
     * <tt>null</tt> if there was no mapping for <tt>key</tt>.
     * @see com.hazelcast.map.impl.operation.PutFromLoadAllOperation
     */
    Object putFromLoad(Data key, Object value, long ttl);
    /** Merges an incoming entry view into this record-store using the supplied merge policy. */
    boolean merge(Data dataKey, EntryView mergingEntryView, MapMergePolicy mergePolicy);
    /** Returns the record for the given key, or {@code null} if absent. */
    R getRecord(Data key);
    /**
     * Puts a data key and a record value to record-store.
     * Used in replication operations.
     *
     * @param key    the data key to put record store.
     * @param record the value for record store.
     * @see com.hazelcast.map.impl.operation.MapReplicationOperation
     */
    void putRecord(Data key, R record);
    /**
     * Iterates over record store entries.
     *
     * @return read only iterator for map values.
     */
    Iterator<Record> iterator();
    /**
     * Iterates over record store entries by respecting expiration.
     *
     * @return read only iterator for map values.
     */
    Iterator<Record> iterator(long now, boolean backup);
    /**
     * Fetches specified number of keys from provided tableIndex.
     *
     * @return {@link MapKeysWithCursor} which is a holder for keys and next index to read from.
     */
    MapKeysWithCursor fetchKeys(int tableIndex, int size);
    /**
     * Fetches specified number of entries from provided tableIndex.
     *
     * @return {@link MapEntriesWithCursor} which is a holder for entries and next index to read from.
     */
    MapEntriesWithCursor fetchEntries(int tableIndex, int size);
    /**
     * Iterates over record store entries but first waits map store to load.
     * If an operation needs to wait a data source load like query operations
     * {@link com.hazelcast.core.IMap#keySet(com.hazelcast.query.Predicate)},
     * this method can be used to return a read-only iterator.
     *
     * @param now    current time in millis
     * @param backup <code>true</code> if a backup partition, otherwise <code>false</code>.
     * @return read only iterator for map values.
     */
    Iterator<Record> loadAwareIterator(long now, boolean backup);
    /** Returns the number of entries in this record-store. */
    int size();
    /** Acquires a transactional lock on the key; {@code blockReads} also blocks readers while locked. */
    boolean txnLock(Data key, String caller, long threadId, long referenceId, long ttl, boolean blockReads);
    /** Extends the lease of an already-held lock. */
    boolean extendLock(Data key, String caller, long threadId, long ttl);
    /** Acquires a lock locally (without informing other members — inferred from the name; confirm). */
    boolean localLock(Data key, String caller, long threadId, long referenceId, long ttl);
    /** Acquires a lock on the key for the given caller/thread. */
    boolean lock(Data key, String caller, long threadId, long referenceId, long ttl);
    /** Returns whether the key is locked by the given caller/thread. */
    boolean isLockedBy(Data key, String caller, long threadId);
    /** Releases a lock held by the given caller/thread; returns whether the unlock succeeded. */
    boolean unlock(Data key, String caller, long threadId, long referenceId);
    /** Returns whether the key is locked by anyone. */
    boolean isLocked(Data key);
    /** Returns whether the key is locked by a transaction. */
    boolean isTransactionallyLocked(Data key);
    /** Returns whether the given caller/thread could acquire the lock without blocking. */
    boolean canAcquireLock(Data key, String caller, long threadId);
    /** Returns a human-readable description of the current lock owner of the key. */
    String getLockOwnerInfo(Data key);
    /** Returns whether any entry currently maps to the given value. */
    boolean containsValue(Object testValue);
    /** Evicts the entry for the given key; returns the evicted value. */
    Object evict(Data key, boolean backup);
    /**
     * Evicts all keys except locked ones.
     *
     * @param backup <code>true</code> if a backup partition, otherwise <code>false</code>.
     * @return number of evicted entries.
     */
    int evictAll(boolean backup);
    /** Returns the container of the map this record-store belongs to. */
    MapContainer getMapContainer();
    /**
     * @see MapDataStore#softFlush()
     */
    long softFlush();
    /**
     * Clears internal partition data.
     *
     * @param onShutdown true if {@code close} is called during MapService shutdown,
     *                   false otherwise.
     */
    void clearPartition(boolean onShutdown);
    /**
     * Resets the record store to it's initial state.
     */
    void reset();
    /** Forcibly releases any lock on the key regardless of owner. */
    boolean forceUnlock(Data dataKey);
    /** Returns the memory cost of owned entries (units not shown here — presumably bytes; confirm). */
    long getOwnedEntryCost();
    /** Returns whether the initial map-store load has completed. */
    boolean isLoaded();
    /** Throws {@link RetryableHazelcastException} when the record-store is not yet loaded, so callers retry. */
    void checkIfLoaded() throws RetryableHazelcastException;
    /** Removes all entries; returns the number of removed entries. */
    int clear();
    /** Returns whether this record-store has no entries. */
    boolean isEmpty();
    /**
     * Do expiration operations.
     *
     * @param percentage of max expirables according to the record store size.
     * @param backup     <code>true</code> if a backup partition, otherwise <code>false</code>.
     */
    void evictExpiredEntries(int percentage, boolean backup);
    /**
     * @return <code>true</code> if record store has at least one candidate entry
     * for expiration else return <code>false</code>.
     */
    boolean isExpirable();
    /**
     * Checks whether a record is expired or not.
     *
     * @param record the record from record-store.
     * @param now    current time in millis
     * @param backup <code>true</code> if a backup partition, otherwise <code>false</code>.
     * @return <code>true</code> if the record is expired, <code>false</code> otherwise.
     */
    boolean isExpired(R record, long now, boolean backup);
    /**
     * Does post eviction operations like sending events
     *
     * @param record record to process
     * @param backup <code>true</code> if a backup partition, otherwise <code>false</code>.
     */
    void doPostEvictionOperations(Record record, boolean backup);
    /**
     * Loads all given keys from defined map store.
     *
     * @param keys keys to be loaded.
     */
    void loadAllFromStore(List<Data> keys, boolean replaceExistingValues);
    /** Updates the bookkeeping of an ongoing map-store load batch. */
    void updateLoadStatus(boolean lastBatch, Throwable exception);
    /** Returns the map data store backing this record-store. */
    MapDataStore<Data, Object> getMapDataStore();
    /** Returns the id of the partition this record-store belongs to. */
    int getPartitionId();
    /**
     * Returns live record or null if record is already expired. Does not load missing keys from a map store.
     *
     * @param key key to be accessed
     * @return live record or null
     * @see #get
     */
    R getRecordOrNull(Data key);
    /**
     * Evicts entries from this record-store.
     *
     * @param excludedKey this key has lowest priority to be selected for eviction
     */
    void evictEntries(Data excludedKey);
    /**
     * Returns <code>true</code> if eviction is allowed on this record-store, otherwise <code>false</code>
     *
     * @return <code>true</code> if eviction is allowed on this record-store, otherwise <code>false</code>
     */
    boolean shouldEvict();
    /**
     * Loads all keys and values
     *
     * @param replaceExistingValues <code>true</code> if need to replace existing values otherwise <code>false</code>
     **/
    void loadAll(boolean replaceExistingValues);
    /**
     * Performs initial loading from a MapLoader if it has not been done before
     **/
    void maybeDoInitialLoad();
    /** Creates the backing storage for records in the given in-memory format. */
    Storage createStorage(RecordFactory<R> recordFactory, InMemoryFormat memoryFormat);
    /** Creates a new record for the given value with the supplied TTL and creation time. */
    Record createRecord(Object value, long ttlMillis, long now);
    /** Loads the record for the key from the map store, or returns {@code null} if it cannot be loaded. */
    Record loadRecordOrNull(Data key, boolean backup);
    /**
     * This can be used to release unused resources.
     */
    void disposeDeferredBlocks();
    /** Destroys this record-store and releases its resources. */
    void destroy();
    /** Returns the backing storage of this record-store. */
    Storage getStorage();
    /**
     * Starts mapLoader
     */
    void startLoading();
    /**
     * Informs this recordStore about the loading status of the recordStore that this store is migrated from.
     * If the 'predecessor' has been loaded this record store should trigger the load again.
     * Will be taken into account only if invoked before the startLoading method. Otherwise has no effect.
     * <p>
     * This method should be deleted when the map's lifecycle has been cleaned-up. Currently it's impossible to
     * pass additional state when the record store is created, thus this this state has to be passed in post-creation
     * setters which is cumbersome and error-prone.
     */
    void setPreMigrationLoadedStatus(boolean loaded);
    /**
     * Initialize the recordStore after creation
     */
    void init();
    /**
     * @return Returns true if key load has finished, false otherwise.
     **/
    boolean isKeyLoadFinished();
    /** Returns the queue of expired keys (used for near-cache invalidation — inferred from the type; confirm). */
    InvalidationQueue<ExpiredKey> getExpiredKeys();
}
| |
/*
* Copyright (c) 2005, 2006, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package com.sun.jmx.mbeanserver;
import java.security.AccessController;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import javax.management.AttributeNotFoundException;
import javax.management.InvalidAttributeValueException;
import javax.management.MBeanException;
import javax.management.MBeanInfo;
import javax.management.ReflectionException;
import static com.sun.jmx.mbeanserver.Util.*;
/**
* Per-MBean-interface behavior. A single instance of this class can be shared
* by all MBeans of the same kind (Standard MBean or MXBean) that have the same
* MBean interface.
*
* @since 1.6
*/
final class PerInterface<M> {
    /**
     * Builds the per-interface dispatch tables (getters, setters, operations)
     * by visiting the given analyzer with an {@link InitMaps} visitor.
     */
    PerInterface(Class<?> mbeanInterface, MBeanIntrospector<M> introspector,
                 MBeanAnalyzer<M> analyzer, MBeanInfo mbeanInfo) {
        this.mbeanInterface = mbeanInterface;
        this.introspector = introspector;
        this.mbeanInfo = mbeanInfo;
        analyzer.visit(new InitMaps());
    }
    /** Returns the MBean interface this instance describes. */
    Class<?> getMBeanInterface() {
        return mbeanInterface;
    }
    /** Returns the MBeanInfo shared by all MBeans of this interface. */
    MBeanInfo getMBeanInfo() {
        return mbeanInfo;
    }
    /** Returns whether this interface is an MXBean (delegates to the introspector). */
    boolean isMXBean() {
        return introspector.isMXBean();
    }
    /**
     * Reads the named attribute from {@code resource} by invoking its getter.
     *
     * @throws AttributeNotFoundException if there is no getter for the attribute
     *         (the message distinguishes write-only from unknown attributes)
     */
    Object getAttribute(Object resource, String attribute, Object cookie)
            throws AttributeNotFoundException,
                   MBeanException,
                   ReflectionException {
        final M cm = getters.get(attribute);
        if (cm == null) {
            final String msg;
            if (setters.containsKey(attribute))
                msg = "Write-only attribute: " + attribute;
            else
                msg = "No such attribute: " + attribute;
            throw new AttributeNotFoundException(msg);
        }
        // Getters take no arguments, hence the null argument array.
        return introspector.invokeM(cm, resource, (Object[]) null, cookie);
    }
    /**
     * Writes the named attribute on {@code resource} by invoking its setter.
     *
     * @throws AttributeNotFoundException if there is no setter for the attribute
     *         (the message distinguishes read-only from unknown attributes)
     */
    void setAttribute(Object resource, String attribute, Object value,
                      Object cookie)
            throws AttributeNotFoundException,
                   InvalidAttributeValueException,
                   MBeanException,
                   ReflectionException {
        final M cm = setters.get(attribute);
        if (cm == null) {
            final String msg;
            if (getters.containsKey(attribute))
                msg = "Read-only attribute: " + attribute;
            else
                msg = "No such attribute: " + attribute;
            throw new AttributeNotFoundException(msg);
        }
        introspector.invokeSetter(attribute, cm, resource, value, cookie);
    }
    /**
     * Invokes the named operation on {@code resource}, selecting the overload
     * whose signature matches exactly. Falls back to {@link #noSuchMethod}
     * when the operation or the signature cannot be found.
     */
    Object invoke(Object resource, String operation, Object[] params,
                  String[] signature, Object cookie)
            throws MBeanException, ReflectionException {
        final List<MethodAndSig> list = ops.get(operation);
        if (list == null) {
            final String msg = "No such operation: " + operation;
            return noSuchMethod(msg, resource, operation, params, signature,
                                cookie);
        }
        if (signature == null)
            signature = new String[0];
        MethodAndSig found = null;
        for (MethodAndSig mas : list) {
            if (Arrays.equals(mas.signature, signature)) {
                found = mas;
                break;
            }
        }
        if (found == null) {
            final String badSig = sigString(signature);
            final String msg;
            if (list.size() == 1) { // helpful exception message
                msg = "Signature mismatch for operation " + operation +
                        ": " + badSig + " should be " +
                        sigString(list.get(0).signature);
            } else {
                msg = "Operation " + operation + " exists but not with " +
                        "this signature: " + badSig;
            }
            return noSuchMethod(msg, resource, operation, params, signature,
                                cookie);
        }
        return introspector.invokeM(found.method, resource, params, cookie);
    }
    /*
     * This method is called when invoke doesn't find the named method.
     * Before throwing an exception, we check to see whether the
     * jmx.invoke.getters property is set, and if so whether the method
     * being invoked might be a getter or a setter.  If so we invoke it
     * and return the result.  This is for compatibility
     * with code based on JMX RI 1.0 or 1.1 which allowed invoking getters
     * and setters.  It is *not* recommended that new code use this feature.
     *
     * Since this method is either going to throw an exception or use
     * functionality that is strongly discouraged, we consider that its
     * performance is not very important.
     *
     * A simpler way to implement the functionality would be to add the getters
     * and setters to the operations map when jmx.invoke.getters is set.
     * However, that means that the property is consulted when an MBean
     * interface is being introspected and not thereafter.  Previously,
     * the property was consulted on every invocation.  So this simpler
     * implementation could potentially break code that sets and unsets
     * the property at different times.
     */
    private Object noSuchMethod(String msg, Object resource, String operation,
                                Object[] params, String[] signature,
                                Object cookie)
            throws MBeanException, ReflectionException {
        // Construct the exception that we will probably throw
        final NoSuchMethodException nsme =
                new NoSuchMethodException(operation + sigString(signature));
        final ReflectionException exception =
                new ReflectionException(nsme, msg);
        if (introspector.isMXBean())
            throw exception; // No compatibility requirement here
        // Is the compatibility property set?
        GetPropertyAction act = new GetPropertyAction("jmx.invoke.getters");
        String invokeGettersS;
        try {
            invokeGettersS = AccessController.doPrivileged(act);
        } catch (Exception e) {
            // We don't expect an exception here but if we get one then
            // we'll simply assume that the property is not set.
            invokeGettersS = null;
        }
        if (invokeGettersS == null)
            throw exception;
        // rest = length of the "get"/"is"/"set" prefix to strip off
        int rest = 0;
        Map<String, M> methods = null;
        if (signature == null || signature.length == 0) {
            if (operation.startsWith("get"))
                rest = 3;
            else if (operation.startsWith("is"))
                rest = 2;
            if (rest != 0)
                methods = getters;
        } else if (signature.length == 1 &&
                operation.startsWith("set")) {
            rest = 3;
            methods = setters;
        }
        if (rest != 0) {
            String attrName = operation.substring(rest);
            M method = methods.get(attrName);
            // Only invoke if the real method name and signature match exactly,
            // so e.g. "getX" cannot accidentally dispatch to "isX".
            if (method != null && introspector.getName(method).equals(operation)) {
                String[] msig = introspector.getSignature(method);
                if ((signature == null && msig.length == 0) ||
                        Arrays.equals(signature, msig)) {
                    return introspector.invokeM(method, resource, params, cookie);
                }
            }
        }
        throw exception;
    }
    /** Formats a signature as e.g. "(int, java.lang.String)" for messages. */
    private String sigString(String[] signature) {
        StringBuilder b = new StringBuilder("(");
        if (signature != null) {
            for (String s : signature) {
                if (b.length() > 1)
                    b.append(", ");
                b.append(s);
            }
        }
        return b.append(")").toString();
    }
    /**
     * Visitor that sets up the method maps (operations, getters, setters).
     */
    private class InitMaps implements MBeanAnalyzer.MBeanVisitor<M> {
        public void visitAttribute(String attributeName,
                                   M getter,
                                   M setter) {
            if (getter != null) {
                introspector.checkMethod(getter);
                final Object old = getters.put(attributeName, getter);
                assert(old == null);
            }
            if (setter != null) {
                introspector.checkMethod(setter);
                final Object old = setters.put(attributeName, setter);
                assert(old == null);
            }
        }
        public void visitOperation(String operationName,
                                   M operation) {
            introspector.checkMethod(operation);
            final String[] sig = introspector.getSignature(operation);
            final MethodAndSig mas = new MethodAndSig();
            mas.method = operation;
            mas.signature = sig;
            // Most operations have a single overload, so start with an
            // immutable singleton and only copy to a mutable list when a
            // second overload shows up.
            List<MethodAndSig> list = ops.get(operationName);
            if (list == null)
                list = Collections.singletonList(mas);
            else {
                if (list.size() == 1)
                    list = newList(list);
                list.add(mas);
            }
            ops.put(operationName, list);
        }
    }
    /** A method paired with its parameter-type signature, for overload matching. */
    private class MethodAndSig {
        M method;
        String[] signature;
    }
    private final Class<?> mbeanInterface;
    private final MBeanIntrospector<M> introspector;
    private final MBeanInfo mbeanInfo;
    // attribute name -> getter method
    private final Map<String, M> getters = newMap();
    // attribute name -> setter method
    private final Map<String, M> setters = newMap();
    // operation name -> overloads
    private final Map<String, List<MethodAndSig>> ops = newMap();
}
| |
package org.jgroups.protocols;
import org.jgroups.*;
import org.jgroups.annotations.*;
import org.jgroups.blocks.atomic.Counter;
import org.jgroups.stack.Protocol;
import org.jgroups.util.*;
import java.io.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.function.Supplier;
/**
* Protocol which is used by {@link org.jgroups.blocks.atomic.CounterService} to provide a distributed atomic counter
* @author Bela Ban
* @since 3.0.0
*/
@MBean(description="Protocol to maintain distributed atomic counters")
public class COUNTER extends Protocol {
    @Property(description="Bypasses message bundling if true")
    protected boolean bypass_bundling=true;
    @Property(description="Request timeouts (in ms). If the timeout elapses, a Timeout (runtime) exception will be thrown")
    protected long timeout=60000;
    @Property(description="Number of milliseconds to wait for reconciliation responses from all current members")
    protected long reconciliation_timeout=10000;
    @Property(description="Number of backup coordinators. Modifications are asynchronously sent to all backup coordinators")
    protected int num_backups=1;
    // This member's address, set via Event.SET_LOCAL_ADDRESS
    protected Address local_addr;
    /** Set to true during reconciliation process, will cause all requests to be discarded */
    protected boolean discard_requests=false;
    // Current cluster view, updated in handleView()
    protected View view;
    /** The address of the cluster coordinator. Updated on view changes */
    protected Address coord;
    /** Backup coordinators. Only created if num_backups > 0 and coord=true */
    protected List<Address> backup_coords=null;
    protected Future<?> reconciliation_task_future;
    protected ReconciliationTask reconciliation_task;
    // server side counters
    protected final ConcurrentMap<String,VersionedValue> counters=Util.createConcurrentMap(20);
    // (client side) pending requests
    protected final Map<Owner,Tuple<Request,Promise>> pending_requests=Util.createConcurrentMap(20);
    // Wire-format discriminators: first byte of every serialized message
    protected static final byte REQUEST  = 1;
    protected static final byte RESPONSE = 2;
    // Second wire byte for requests: ordinal of this enum
    protected enum RequestType {
        GET_OR_CREATE,
        DELETE,
        SET,
        COMPARE_AND_SET,
        ADD_AND_GET,
        UPDATE,
        RECONCILE,
        RESEND_PENDING_REQUESTS
    }
    // Second wire byte for responses: ordinal of this enum
    protected enum ResponseType {
        VOID,
        GET_OR_CREATE,
        BOOLEAN,
        VALUE,
        EXCEPTION,
        RECONCILE
    }
protected static RequestType requestToRequestType(Request req) {
if(req instanceof GetOrCreateRequest) return RequestType.GET_OR_CREATE;
if(req instanceof DeleteRequest) return RequestType.DELETE;
if(req instanceof AddAndGetRequest) return RequestType.ADD_AND_GET;
if(req instanceof UpdateRequest) return RequestType.UPDATE;
if(req instanceof SetRequest) return RequestType.SET;
if(req instanceof CompareAndSetRequest) return RequestType.COMPARE_AND_SET;
if(req instanceof ReconcileRequest) return RequestType.RECONCILE;
if(req instanceof ResendPendingRequests) return RequestType.RESEND_PENDING_REQUESTS;
throw new IllegalStateException("request " + req + " cannot be mapped to request type");
}
protected static ResponseType responseToResponseType(Response rsp) {
if(rsp instanceof GetOrCreateResponse) return ResponseType.GET_OR_CREATE;
if(rsp instanceof BooleanResponse) return ResponseType.BOOLEAN;
if(rsp instanceof ValueResponse) return ResponseType.VALUE;
if(rsp instanceof ExceptionResponse) return ResponseType.EXCEPTION;
if(rsp instanceof ReconcileResponse) return ResponseType.RECONCILE;
if(rsp != null) return ResponseType.VOID;
throw new IllegalStateException("response " + rsp + " cannot be mapped to response type");
}
public boolean getBypassBundling() {
return bypass_bundling;
}
public void setBypassBundling(boolean bypass_bundling) {
this.bypass_bundling=bypass_bundling;
}
@ManagedAttribute
public String getAddress() {
return local_addr != null? local_addr.toString() : null;
}
@ManagedAttribute
public String getView() {
return view != null? view.toString() : null;
}
@ManagedAttribute(description="List of the backup coordinator (null if num_backups <= 0")
public String getBackupCoords() {
return backup_coords != null? backup_coords.toString() : "null";
}
public Counter getOrCreateCounter(String name, long initial_value) {
if(local_addr == null)
throw new IllegalArgumentException("the channel needs to be connected before creating or getting a counter");
Owner owner=getOwner();
GetOrCreateRequest req=new GetOrCreateRequest(owner, name, initial_value);
Promise<long[]> promise=new Promise<>();
pending_requests.put(owner, new Tuple<>(req, promise));
sendRequest(coord, req);
long[] result=new long[0];
try {
result=promise.getResultWithTimeout(timeout);
long value=result[0], version=result[1];
if(!coord.equals(local_addr))
counters.put(name, new VersionedValue(value, version));
return new CounterImpl(name);
}
catch(TimeoutException e) {
throw new RuntimeException(e);
}
}
/** Sent asynchronously - we don't wait for an ack */
public void deleteCounter(String name) {
Owner owner=getOwner();
Request req=new DeleteRequest(owner, name);
sendRequest(coord, req);
if(!local_addr.equals(coord))
counters.remove(name);
}
public Object down(Event evt) {
switch(evt.getType()) {
case Event.SET_LOCAL_ADDRESS:
local_addr=evt.getArg();
break;
case Event.VIEW_CHANGE:
handleView(evt.arg());
break;
}
return down_prot.down(evt);
}
public Object up(Event evt) {
switch(evt.getType()) {
case Event.VIEW_CHANGE:
handleView(evt.getArg());
break;
}
return up_prot.up(evt);
}
public Object up(Message msg) {
CounterHeader hdr=msg.getHeader(id);
if(hdr == null)
return up_prot.up(msg);
try {
Object obj=streamableFromBuffer(msg.getRawBuffer(), msg.getOffset(), msg.getLength());
if(log.isTraceEnabled())
log.trace("[" + local_addr + "] <-- [" + msg.getSrc() + "] " + obj);
if(obj instanceof Request) {
handleRequest((Request)obj, msg.getSrc());
}
else if(obj instanceof Response) {
handleResponse((Response)obj, msg.getSrc());
}
else {
log.error(Util.getMessage("ReceivedObjectIsNeitherARequestNorAResponse") + obj);
}
}
catch(Exception ex) {
log.error(Util.getMessage("FailedHandlingMessage"), ex);
}
return null;
}
    /**
     * Server-side dispatch of an incoming request. Most request types are only
     * honored when this member is the coordinator and reconciliation is not in
     * progress. NOTE: several locals ({@code val}, {@code rsp}, {@code result},
     * {@code counter_name}) are declared in one case and reused in later cases —
     * they share the switch's scope, so be careful when editing.
     *
     * @param req    the deserialized request
     * @param sender the member that sent it
     */
    protected void handleRequest(Request req, Address sender) {
        RequestType type=requestToRequestType(req);
        switch(type) {
            // Coordinator-only: create-if-absent, reply with value+version, propagate to backups
            case GET_OR_CREATE:
                if(!local_addr.equals(coord) || discard_requests)
                    return;
                GetOrCreateRequest tmp=(GetOrCreateRequest)req;
                VersionedValue new_val=new VersionedValue(tmp.initial_value);
                VersionedValue val=counters.putIfAbsent(tmp.name, new_val);
                if(val == null)
                    val=new_val;
                Response rsp=new GetOrCreateResponse(tmp.owner, val.value, val.version);
                sendResponse(sender,rsp);
                if(backup_coords != null)
                    updateBackups(tmp.name, val.value, val.version);
                break;
            // Coordinator-only: remove the counter; no response is sent (fire-and-forget)
            case DELETE:
                if(!local_addr.equals(coord) || discard_requests)
                    return;
                counters.remove(((SimpleRequest)req).name);
                break;
            // Coordinator-only: unconditional set, reply with new value+version
            case SET:
                if(!local_addr.equals(coord) || discard_requests)
                    return;
                val=counters.get(((SimpleRequest)req).name);
                if(val == null) {
                    sendCounterNotFoundExceptionResponse(sender, ((SimpleRequest)req).owner, ((SimpleRequest)req).name);
                    return;
                }
                long[] result=val.set(((SetRequest)req).value);
                rsp=new ValueResponse(((SimpleRequest)req).owner, result[0], result[1]);
                sendResponse(sender, rsp);
                if(backup_coords != null)
                    updateBackups(((SimpleRequest)req).name, result[0], result[1]);
                break;
            // Coordinator-only: CAS; a null result (encoded as -1/-1) signals a failed compare
            case COMPARE_AND_SET:
                if(!local_addr.equals(coord) || discard_requests)
                    return;
                val=counters.get(((SimpleRequest)req).name);
                if(val == null) {
                    sendCounterNotFoundExceptionResponse(sender, ((SimpleRequest)req).owner, ((SimpleRequest)req).name);
                    return;
                }
                result=val.compareAndSet(((CompareAndSetRequest)req).expected,((CompareAndSetRequest)req).update);
                rsp=new ValueResponse(((SimpleRequest)req).owner, result == null? -1 : result[0], result == null? -1 : result[1]);
                sendResponse(sender, rsp);
                if(backup_coords != null) {
                    // re-read: the CAS may or may not have changed the stored value
                    VersionedValue value=counters.get(((SimpleRequest)req).name);
                    updateBackups(((SimpleRequest)req).name, value.value, value.version);
                }
                break;
            // Coordinator-only: atomic add, reply with new value+version
            case ADD_AND_GET:
                if(!local_addr.equals(coord) || discard_requests)
                    return;
                val=counters.get(((SimpleRequest)req).name);
                if(val == null) {
                    sendCounterNotFoundExceptionResponse(sender, ((SimpleRequest)req).owner, ((SimpleRequest)req).name);
                    return;
                }
                result=val.addAndGet(((AddAndGetRequest)req).value);
                rsp=new ValueResponse(((SimpleRequest)req).owner, result[0], result[1]);
                sendResponse(sender, rsp);
                if(backup_coords != null)
                    updateBackups(((SimpleRequest)req).name, result[0], result[1]);
                break;
            // Sent by the coordinator to backups: adopt the value if its version is newer
            case UPDATE:
                String counter_name=((UpdateRequest)req).name;
                long new_value=((UpdateRequest)req).value, new_version=((UpdateRequest)req).version;
                VersionedValue current=counters.get(counter_name);
                if(current == null)
                    counters.put(counter_name, new VersionedValue(new_value, new_version));
                else {
                    current.updateIfBigger(new_value, new_version);
                }
                break;
            case RECONCILE:
                if(sender.equals(local_addr)) // we don't need to reply to our own reconciliation request
                    break;
                // return all values except those with lower or same versions than the ones in the ReconcileRequest
                ReconcileRequest reconcile_req=(ReconcileRequest)req;
                Map<String,VersionedValue> map=new HashMap<>(counters);
                if(reconcile_req.names != null) {
                    for(int i=0; i < reconcile_req.names.length; i++) {
                        counter_name=reconcile_req.names[i];
                        long version=reconcile_req.versions[i];
                        VersionedValue my_value=map.get(counter_name);
                        if(my_value != null && my_value.version <= version)
                            map.remove(counter_name);
                    }
                }
                // flatten the surviving entries into parallel arrays for the response
                int len=map.size();
                String[] names=new String[len];
                long[] values=new long[len];
                long[] versions=new long[len];
                int index=0;
                for(Map.Entry<String,VersionedValue> entry: map.entrySet()) {
                    names[index]=entry.getKey();
                    values[index]=entry.getValue().value;
                    versions[index]=entry.getValue().version;
                    index++;
                }
                rsp=new ReconcileResponse(names, values, versions);
                sendResponse(sender, rsp);
                break;
            // New coordinator asks clients to resend requests that never got a reply
            case RESEND_PENDING_REQUESTS:
                for(Tuple<Request,Promise> tuple: pending_requests.values()) {
                    Request request=tuple.getVal1();
                    if(log.isTraceEnabled())
                        log.trace("[" + local_addr + "] --> [" + coord + "] resending " + request);
                    sendRequest(coord, request);
                }
                break;
            default:
                break;
        }
    }
protected VersionedValue getCounter(String name) {
VersionedValue val=counters.get(name);
if(val == null)
throw new IllegalStateException("counter \"" + name + "\" not found");
return val;
}
@SuppressWarnings("unchecked")
protected void handleResponse(Response rsp, Address sender) {
if(rsp instanceof ReconcileResponse) {
if(log.isTraceEnabled() && ((ReconcileResponse)rsp).names != null && ((ReconcileResponse)rsp).names.length > 0)
log.trace("[" + local_addr + "] <-- [" + sender + "] RECONCILE-RSP: " +
dump(((ReconcileResponse)rsp).names, ((ReconcileResponse)rsp).values, ((ReconcileResponse)rsp).versions));
if(reconciliation_task != null)
reconciliation_task.add((ReconcileResponse)rsp, sender);
return;
}
Tuple<Request,Promise> tuple=pending_requests.remove(((SimpleResponse)rsp).owner);
if(tuple == null) {
log.warn("response for " + ((SimpleResponse)rsp).owner + " didn't have an entry");
return;
}
Promise promise=tuple.getVal2();
if(rsp instanceof ValueResponse) {
ValueResponse tmp=(ValueResponse)rsp;
if(tmp.result == -1 && tmp.version == -1)
promise.setResult(null);
else {
long[] result={tmp.result,tmp.version};
promise.setResult(result);
}
}
else if(rsp instanceof BooleanResponse)
promise.setResult(((BooleanResponse)rsp).result);
else if(rsp instanceof ExceptionResponse) {
promise.setResult(new Throwable(((ExceptionResponse)rsp).error_message));
}
else
promise.setResult(null);
}
@ManagedOperation(description="Dumps all counters")
public String printCounters() {
StringBuilder sb=new StringBuilder();
for(Map.Entry<String,VersionedValue> entry: counters.entrySet())
sb.append(entry.getKey()).append(": ").append(entry.getValue()).append("\n");
return sb.toString();
}
@ManagedOperation(description="Dumps all pending requests")
public String dumpPendingRequests() {
StringBuilder sb=new StringBuilder();
for(Tuple<Request,Promise> tuple: pending_requests.values()) {
Request tmp=tuple.getVal1();
sb.append(tmp + " (" + tmp.getClass().getCanonicalName() + ") ");
}
return sb.toString();
}
    /**
     * Reacts to a view change: recomputes the coordinator, (re)builds the
     * backup-coordinator list when this member is the coordinator, pushes the
     * current counter values to any *new* backups, and — when coordinatorship
     * moved to this member — starts a reconciliation round while discarding
     * requests until it finishes.
     */
    protected void handleView(View view) {
        this.view=view;
        if(log.isDebugEnabled())
            log.debug("view=" + view);
        List<Address> members=view.getMembers();
        Address old_coord=coord;
        if(!members.isEmpty())
            coord=members.get(0); // by convention the first member is the coordinator
        if(Objects.equals(coord, local_addr)) {
            List<Address> old_backups=backup_coords != null? new ArrayList<>(backup_coords) : null;
            backup_coords=new CopyOnWriteArrayList<>(Util.pickNext(members, local_addr, num_backups));
            // send the current values to all *new* backups
            List<Address> new_backups=Util.newElements(old_backups,backup_coords);
            for(Address new_backup: new_backups) {
                for(Map.Entry<String,VersionedValue> entry: counters.entrySet()) {
                    UpdateRequest update=new UpdateRequest(entry.getKey(), entry.getValue().value, entry.getValue().version);
                    sendRequest(new_backup, update);
                }
            }
        }
        else
            backup_coords=null; // only the coordinator maintains backups
        // coordinator changed and we are the new one: reconcile state from all members
        if(old_coord != null && coord != null && !old_coord.equals(coord) && local_addr.equals(coord)) {
            discard_requests=true; // set to false when the task is done
            startReconciliationTask();
        }
    }
protected Owner getOwner() {
return new Owner(local_addr, Thread.currentThread().getId());
}
protected void sendRequest(Address dest, Request req) {
try {
Buffer buffer=requestToBuffer(req);
Message msg=new Message(dest, buffer).putHeader(id, new CounterHeader());
if(bypass_bundling)
msg.setFlag(Message.Flag.DONT_BUNDLE);
if(log.isTraceEnabled())
log.trace("[" + local_addr + "] --> [" + (dest == null? "ALL" : dest) + "] " + req);
down_prot.down(msg);
}
catch(Exception ex) {
log.error(Util.getMessage("FailedSending") + req + " request: " + ex);
}
}
protected void sendResponse(Address dest, Response rsp) {
try {
Buffer buffer=responseToBuffer(rsp);
Message rsp_msg=new Message(dest, buffer).putHeader(id, new CounterHeader());
if(bypass_bundling)
rsp_msg.setFlag(Message.Flag.DONT_BUNDLE);
if(log.isTraceEnabled())
log.trace("[" + local_addr + "] --> [" + dest + "] " + rsp);
down_prot.down(rsp_msg);
}
catch(Exception ex) {
log.error(Util.getMessage("FailedSending") + rsp + " message to " + dest + ": " + ex);
}
}
protected void updateBackups(String name, long value, long version) {
Request req=new UpdateRequest(name, value, version);
try {
Buffer buffer=requestToBuffer(req);
if(backup_coords != null && !backup_coords.isEmpty()) {
for(Address backup_coord: backup_coords)
send(backup_coord, buffer);
}
}
catch(Exception ex) {
log.error(Util.getMessage("FailedSending") + req + " to backup coordinator(s):" + ex);
}
}
protected void send(Address dest, Buffer buffer) {
try {
Message rsp_msg=new Message(dest, buffer).putHeader(id, new CounterHeader());
if(bypass_bundling)
rsp_msg.setFlag(Message.Flag.DONT_BUNDLE);
down_prot.down(rsp_msg);
}
catch(Exception ex) {
log.error(Util.getMessage("FailedSendingMessageTo") + dest + ": " + ex);
}
}
protected void sendCounterNotFoundExceptionResponse(Address dest, Owner owner, String counter_name) {
Response rsp=new ExceptionResponse(owner, "counter \"" + counter_name + "\" not found");
sendResponse(dest, rsp);
}
protected static Buffer requestToBuffer(Request req) throws Exception {
return streamableToBuffer(REQUEST,(byte)requestToRequestType(req).ordinal(), req);
}
    /** Serializes a response as [RESPONSE marker byte, response-type ordinal byte, payload]. */
    protected static Buffer responseToBuffer(Response rsp) throws Exception {
        return streamableToBuffer(RESPONSE,(byte)responseToResponseType(rsp).ordinal(), rsp);
    }
protected static Buffer streamableToBuffer(byte req_or_rsp, byte type, Streamable obj) throws Exception {
int expected_size=obj instanceof SizeStreamable? ((SizeStreamable)obj).serializedSize() : 100;
ByteArrayDataOutputStream out=new ByteArrayDataOutputStream(expected_size);
out.writeByte(req_or_rsp);
out.writeByte(type);
obj.writeTo(out);
return new Buffer(out.buffer(), 0, out.position());
}
protected static Streamable streamableFromBuffer(byte[] buf, int offset, int length) throws Exception {
switch(buf[offset]) {
case REQUEST:
return requestFromBuffer(buf, offset+1, length-1);
case RESPONSE:
return responseFromBuffer(buf, offset+1, length-1);
default:
throw new IllegalArgumentException("type " + buf[offset] + " is invalid (expected Request (1) or RESPONSE (2)");
}
}
protected static final Request requestFromBuffer(byte[] buf, int offset, int length) throws Exception {
ByteArrayInputStream input=new ByteArrayInputStream(buf, offset, length);
DataInputStream in=new DataInputStream(input);
RequestType type=RequestType.values()[in.readByte()];
Request retval=createRequest(type);
retval.readFrom(in);
return retval;
}
    /** Factory: maps a {@code RequestType} to a fresh, empty request instance for deserialization. */
    protected static Request createRequest(RequestType type) {
        switch(type) {
            case COMPARE_AND_SET: return new CompareAndSetRequest();
            case ADD_AND_GET: return new AddAndGetRequest();
            case UPDATE: return new UpdateRequest();
            case GET_OR_CREATE: return new GetOrCreateRequest();
            case DELETE: return new DeleteRequest();
            case SET: return new SetRequest();
            case RECONCILE: return new ReconcileRequest();
            case RESEND_PENDING_REQUESTS: return new ResendPendingRequests();
            default: throw new IllegalArgumentException("failed creating a request from " + type);
        }
    }
protected static final Response responseFromBuffer(byte[] buf, int offset, int length) throws Exception {
ByteArrayInputStream input=new ByteArrayInputStream(buf, offset, length);
DataInputStream in=new DataInputStream(input);
ResponseType type=ResponseType.values()[in.readByte()];
Response retval=createResponse(type);
retval.readFrom(in);
return retval;
}
    /** Factory: maps a {@code ResponseType} to a fresh, empty response instance for deserialization. */
    protected static Response createResponse(ResponseType type) {
        switch(type) {
            case VOID: return new SimpleResponse();
            case GET_OR_CREATE: return new GetOrCreateResponse();
            case BOOLEAN: return new BooleanResponse();
            case VALUE: return new ValueResponse();
            case EXCEPTION: return new ExceptionResponse();
            case RECONCILE: return new ReconcileResponse();
            default: throw new IllegalArgumentException("failed creating a response from " + type);
        }
    }
    /**
     * Starts a reconciliation task unless one is already running; the task is scheduled to run
     * immediately (delay 0) on the transport's timer.
     */
    protected synchronized void startReconciliationTask() {
        if(reconciliation_task_future == null || reconciliation_task_future.isDone()) {
            reconciliation_task=new ReconciliationTask();
            reconciliation_task_future=getTransport().getTimer().schedule(reconciliation_task, 0, TimeUnit.MILLISECONDS);
        }
    }
    /** Cancels a running reconciliation task (interrupting it if running) and clears the future. */
    protected synchronized void stopReconciliationTask() {
        if(reconciliation_task_future != null) {
            reconciliation_task_future.cancel(true);
            if(reconciliation_task != null)
                reconciliation_task.cancel();
            reconciliation_task_future=null;
        }
    }
protected static void writeReconciliation(DataOutput out, String[] names, long[] values, long[] versions) throws IOException {
if(names == null) {
out.writeInt(0);
return;
}
out.writeInt(names.length);
for(String name: names)
Bits.writeString(name,out);
for(long value: values)
Bits.writeLong(value, out);
for(long version: versions)
Bits.writeLong(version, out);
}
protected static String[] readReconciliationNames(DataInput in, int len) throws IOException {
String[] retval=new String[len];
for(int i=0; i < len; i++)
retval[i]=Bits.readString(in);
return retval;
}
protected static long[] readReconciliationLongs(DataInput in, int len) throws IOException {
long[] retval=new long[len];
for(int i=0; i < len; i++)
retval[i]=Bits.readLong(in);
return retval;
}
protected static String dump(String[] names, long[] values, long[] versions) {
StringBuilder sb=new StringBuilder();
if(names != null) {
for(int i=0; i < names.length; i++) {
sb.append(names[i]).append(": ").append(values[i]).append(" (").append(versions[i]).append(")\n");
}
}
return sb.toString();
}
    /**
     * Client-side handle for one named counter. Every mutating operation is applied directly
     * when this member is the coordinator (then replicated to the backup coordinators);
     * otherwise it is sent to the coordinator as a request and the calling thread blocks on a
     * promise until the response arrives or {@code timeout} expires.
     */
    protected class CounterImpl implements Counter {
        // Name of this counter; key into the counters map.
        protected final String name;
        protected CounterImpl(String name) {
            this.name = name;
        }
        public String getName() {
            return name;
        }
        @Override
        public long get() {
            // A get is an add of 0: returns the current value without changing it.
            return addAndGet(0);
        }
        @Override
        public void set(long new_value) {
            if(local_addr.equals(coord)) {
                // Coordinator fast path: mutate the authoritative value locally ...
                VersionedValue val=getCounter(name);
                val.set(new_value);
                // ... and replicate the new value/version to the backups, if any.
                if(backup_coords != null)
                    updateBackups(name, val.value, val.version);
                return;
            }
            // Non-coordinator: register a promise keyed by owner, send the request, block.
            Owner owner=getOwner();
            Request req=new SetRequest(owner, name, new_value);
            Promise<long[]> promise=new Promise<>();
            pending_requests.put(owner, new Tuple<>(req, promise));
            sendRequest(coord, req);
            Object obj=null;
            try {
                obj=promise.getResultWithTimeout(timeout);
                // The promise may deliver an exception instead of a result.
                if(obj instanceof Throwable)
                    throw new IllegalStateException((Throwable)obj);
                long[] result=(long[])obj;
                long value=result[0], version=result[1];
                // Cache the returned value/version locally.
                if(!coord.equals(local_addr))
                    counters.put(name, new VersionedValue(value, version));
            }
            catch(TimeoutException e) {
                throw new RuntimeException(e);
            }
        }
        @Override
        public boolean compareAndSet(long expect, long update) {
            if(local_addr.equals(coord)) {
                // Coordinator fast path; compareAndSet returns null on failure.
                VersionedValue val=getCounter(name);
                boolean retval=val.compareAndSet(expect, update) != null;
                if(backup_coords != null)
                    updateBackups(name, val.value, val.version);
                return retval;
            }
            Owner owner=getOwner();
            Request req=new CompareAndSetRequest(owner, name, expect, update);
            Promise<long[]> promise=new Promise<>();
            pending_requests.put(owner, new Tuple<>(req, promise));
            sendRequest(coord, req);
            Object obj=null;
            try {
                obj=promise.getResultWithTimeout(timeout);
                if(obj instanceof Throwable)
                    throw new IllegalStateException((Throwable)obj);
                // A null result means the CAS failed (expected value didn't match).
                if(obj == null)
                    return false;
                long[] result=(long[])obj;
                long value=result[0], version=result[1];
                if(!coord.equals(local_addr))
                    counters.put(name, new VersionedValue(value, version));
                return true;
            }
            catch(TimeoutException e) {
                throw new RuntimeException(e);
            }
        }
        @Override
        public long incrementAndGet() {
            return addAndGet(1);
        }
        @Override
        public long decrementAndGet() {
            return addAndGet(-1);
        }
        @Override
        public long addAndGet(long delta) {
            if(local_addr.equals(coord)) {
                // Coordinator fast path: addAndGet returns {new value, new version}.
                VersionedValue val=getCounter(name);
                long retval=val.addAndGet(delta)[0];
                if(backup_coords != null)
                    updateBackups(name, val.value, val.version);
                return retval;
            }
            Owner owner=getOwner();
            Request req=new AddAndGetRequest(owner, name, delta);
            Promise<long[]> promise=new Promise<>();
            pending_requests.put(owner, new Tuple<>(req, promise));
            sendRequest(coord, req);
            Object obj=null;
            try {
                obj=promise.getResultWithTimeout(timeout);
                if(obj instanceof Throwable)
                    throw new IllegalStateException((Throwable)obj);
                long[] result=(long[])obj;
                long value=result[0], version=result[1];
                if(!coord.equals(local_addr))
                    counters.put(name, new VersionedValue(value, version));
                return value;
            }
            catch(TimeoutException e) {
                throw new RuntimeException(e);
            }
        }
        @Override
        public String toString() {
            // Prints the locally cached value; "n/a" if nothing is cached yet.
            VersionedValue val=counters.get(name);
            return val != null? val.toString() : "n/a";
        }
    }
    /** Marker for all counter requests sent from members to the coordinator. */
    protected interface Request extends Streamable {
    }
    /**
     * Base class for requests that carry the requesting {@code Owner} (used to correlate the
     * response with the pending request) and the name of the target counter.
     */
    protected static class SimpleRequest implements Request {
        protected Owner owner;
        protected String name;
        // No-arg constructor for deserialization.
        protected SimpleRequest() {
        }
        protected SimpleRequest(Owner owner, String name) {
            this.owner=owner;
            this.name=name;
        }
        @Override
        public void writeTo(DataOutput out) throws IOException {
            owner.writeTo(out);
            Bits.writeString(name,out);
        }
        @Override
        public void readFrom(DataInput in) throws IOException, ClassNotFoundException {
            owner=new Owner();
            owner.readFrom(in);
            name=Bits.readString(in);
        }
        public String toString() {
            return owner + " [" + name + "]";
        }
    }
    /**
     * Multicast after reconciliation, asking members to resend requests that were pending while
     * the coordinator changed. Carries no payload.
     */
    protected static class ResendPendingRequests implements Request {
        @Override
        public void writeTo(DataOutput out) throws IOException {}
        @Override
        public void readFrom(DataInput in) throws IOException {}
        public String toString() {return "ResendPendingRequests";}
    }
protected static class GetOrCreateRequest extends SimpleRequest {
protected long initial_value;
protected GetOrCreateRequest() {}
GetOrCreateRequest(Owner owner, String name, long initial_value) {
super(owner,name);
this.initial_value=initial_value;
}
@Override
public void readFrom(DataInput in) throws IOException, ClassNotFoundException {
super.readFrom(in);
initial_value=Bits.readLong(in);
}
@Override
public void writeTo(DataOutput out) throws IOException {
super.writeTo(out);
Bits.writeLong(initial_value, out);
}
}
    /** Request to delete the named counter; payload is just the inherited owner + name. */
    protected static class DeleteRequest extends SimpleRequest {
        // No-arg constructor for deserialization.
        protected DeleteRequest() {}
        protected DeleteRequest(Owner owner, String name) {
            super(owner,name);
        }
        public String toString() {return "DeleteRequest: " + super.toString();}
    }
    /** Request to atomically add {@code value} (the delta) to a counter; reuses SetRequest's wire format. */
    protected static class AddAndGetRequest extends SetRequest {
        // No-arg constructor for deserialization.
        protected AddAndGetRequest() {}
        protected AddAndGetRequest(Owner owner, String name, long value) {
            super(owner,name,value);
        }
        public String toString() {return "AddAndGetRequest: " + super.toString();}
    }
    /** Request to set a counter to an absolute {@code value}. */
    protected static class SetRequest extends SimpleRequest {
        protected long value;
        // No-arg constructor for deserialization.
        protected SetRequest() {}
        protected SetRequest(Owner owner, String name, long value) {
            super(owner, name);
            this.value=value;
        }
        @Override
        public void readFrom(DataInput in) throws IOException, ClassNotFoundException {
            super.readFrom(in);
            value=Bits.readLong(in);
        }
        @Override
        public void writeTo(DataOutput out) throws IOException {
            super.writeTo(out);
            Bits.writeLong(value, out);
        }
        public String toString() {return super.toString() + ": " + value;}
    }
    /** Request to set a counter to {@code update} only if its current value equals {@code expected}. */
    protected static class CompareAndSetRequest extends SimpleRequest {
        protected long expected, update;
        // No-arg constructor for deserialization.
        protected CompareAndSetRequest() {}
        protected CompareAndSetRequest(Owner owner, String name, long expected, long update) {
            super(owner, name);
            this.expected=expected;
            this.update=update;
        }
        @Override
        public void readFrom(DataInput in) throws IOException, ClassNotFoundException {
            super.readFrom(in);
            expected=Bits.readLong(in);
            update=Bits.readLong(in);
        }
        @Override
        public void writeTo(DataOutput out) throws IOException {
            super.writeTo(out);
            Bits.writeLong(expected, out);
            Bits.writeLong(update, out);
        }
        public String toString() {return super.toString() + ", expected=" + expected + ", update=" + update;}
    }
protected static class ReconcileRequest implements Request {
protected String[] names;
protected long[] values;
protected long[] versions;
protected ReconcileRequest() {}
protected ReconcileRequest(String[] names, long[] values, long[] versions) {
this.names=names;
this.values=values;
this.versions=versions;
}
@Override
public void writeTo(DataOutput out) throws IOException {
writeReconciliation(out, names, values, versions);
}
@Override
public void readFrom(DataInput in) throws IOException {
int len=in.readInt();
names=readReconciliationNames(in, len);
values=readReconciliationLongs(in, len);
versions=readReconciliationLongs(in,len);
}
public String toString() {return "ReconcileRequest (" + names.length + ") entries";}
}
protected static class UpdateRequest implements Request {
protected String name;
protected long value;
protected long version;
protected UpdateRequest() {}
protected UpdateRequest(String name, long value, long version) {
this.name=name;
this.value=value;
this.version=version;
}
@Override
public void writeTo(DataOutput out) throws IOException {
Bits.writeString(name,out);
Bits.writeLong(value, out);
Bits.writeLong(version, out);
}
@Override
public void readFrom(DataInput in) throws IOException {
name=Bits.readString(in);
value=Bits.readLong(in);
version=Bits.readLong(in);
}
public String toString() {return "UpdateRequest(" + name + ": "+ value + " (" + version + ")";}
}
    /** Marker for all responses sent from the coordinator back to requesters. */
    protected interface Response extends Streamable {}
    /**
     * Response without data: carries only the requester's {@code Owner} (to correlate with the
     * pending request) and the counter's version.
     */
    protected static class SimpleResponse implements Response {
        protected Owner owner;
        protected long version;
        // No-arg constructor for deserialization.
        protected SimpleResponse() {}
        protected SimpleResponse(Owner owner, long version) {
            this.owner=owner;
            this.version=version;
        }
        @Override
        public void readFrom(DataInput in) throws IOException, ClassNotFoundException {
            owner=new Owner();
            owner.readFrom(in);
            version=Bits.readLong(in);
        }
        @Override
        public void writeTo(DataOutput out) throws IOException {
            owner.writeTo(out);
            Bits.writeLong(version, out);
        }
        public String toString() {return "Response";}
    }
    /** Response carrying a boolean result (e.g. the outcome of a compare-and-set). */
    protected static class BooleanResponse extends SimpleResponse {
        protected boolean result;
        // No-arg constructor for deserialization.
        protected BooleanResponse() {}
        protected BooleanResponse(Owner owner, long version, boolean result) {
            super(owner, version);
            this.result=result;
        }
        @Override
        public void readFrom(DataInput in) throws IOException, ClassNotFoundException {
            super.readFrom(in);
            result=in.readBoolean();
        }
        @Override
        public void writeTo(DataOutput out) throws IOException {
            super.writeTo(out);
            out.writeBoolean(result);
        }
        public String toString() {return "BooleanResponse(" + result + ")";}
    }
    /** Response carrying a long result (the counter's value). */
    protected static class ValueResponse extends SimpleResponse {
        protected long result;
        // No-arg constructor for deserialization.
        protected ValueResponse() {}
        protected ValueResponse(Owner owner, long result, long version) {
            super(owner, version);
            this.result=result;
        }
        @Override
        public void readFrom(DataInput in) throws IOException, ClassNotFoundException {
            super.readFrom(in);
            result=Bits.readLong(in);
        }
        @Override
        public void writeTo(DataOutput out) throws IOException {
            super.writeTo(out);
            Bits.writeLong(result, out);
        }
        public String toString() {return "ValueResponse(" + result + ")";}
    }
    /** Response to a GetOrCreateRequest; wire-compatible with ValueResponse, distinct type only. */
    protected static class GetOrCreateResponse extends ValueResponse {
        // No-arg constructor for deserialization.
        protected GetOrCreateResponse() {}
        protected GetOrCreateResponse(Owner owner, long result, long version) {
            super(owner,result, version);
        }
        public String toString() {return "GetOrCreateResponse(" + result + ")";}
    }
    /** Response signalling a failure; carries only an error message (version is always 0). */
    protected static class ExceptionResponse extends SimpleResponse {
        protected String error_message;
        // No-arg constructor for deserialization.
        protected ExceptionResponse() {}
        protected ExceptionResponse(Owner owner, String error_message) {
            super(owner, 0);
            this.error_message=error_message;
        }
        @Override
        public void readFrom(DataInput in) throws IOException, ClassNotFoundException {
            super.readFrom(in);
            error_message=Bits.readString(in);
        }
        @Override
        public void writeTo(DataOutput out) throws IOException {
            super.writeTo(out);
            Bits.writeString(error_message,out);
        }
        public String toString() {return "ExceptionResponse: " + super.toString();}
    }
    /**
     * A member's reply to a ReconcileRequest, carrying its local counter state as parallel
     * arrays (name, value, version); same wire format as the request.
     */
    protected static class ReconcileResponse implements Response {
        protected String[] names;
        protected long[] values;
        protected long[] versions;
        // No-arg constructor for deserialization; arrays stay null until readFrom().
        protected ReconcileResponse() {}
        protected ReconcileResponse(String[] names, long[] values, long[] versions) {
            this.names=names;
            this.values=values;
            this.versions=versions;
        }
        @Override
        public void writeTo(DataOutput out) throws IOException {
            writeReconciliation(out,names,values,versions);
        }
        @Override
        public void readFrom(DataInput in) throws IOException {
            int len=in.readInt();
            names=readReconciliationNames(in, len);
            values=readReconciliationLongs(in, len);
            versions=readReconciliationLongs(in,len);
        }
        public String toString() {
            // Null-safe: names is null on a default-constructed instance.
            int num=names != null? names.length : 0;
            return "ReconcileResponse (" + num + ") entries";
        }
    }
    /** Header that only tags a message as belonging to this protocol; carries no data. */
    public static class CounterHeader extends Header {
        public Supplier<? extends Header> create() {return CounterHeader::new;}
        public short getMagicId() {return 74;}
        @Override
        public int serializedSize() {return 0;}
        @Override
        public void writeTo(DataOutput out) {}
        @Override
        public void readFrom(DataInput in) {}
    }
protected static class VersionedValue {
protected long value;
protected long version=1;
protected VersionedValue(long value) {
this.value=value;
}
protected VersionedValue(long value, long version) {
this.value=value;
this.version=version;
}
/** num == 0 --> GET */
protected synchronized long[] addAndGet(long num) {
return num == 0? new long[]{value, version} : new long[]{value+=num, ++version};
}
protected synchronized long[] set(long value) {
return new long[]{this.value=value,++version};
}
protected synchronized long[] compareAndSet(long expected, long update) {
if(value == expected)
return new long[]{value=update, ++version};
return null;
}
/** Sets the value only if the version argument is greater than the own version */
protected synchronized void updateIfBigger(long value, long version) {
if(version > this.version) {
this.version=version;
this.value=value;
}
}
public String toString() {return value + " (version=" + version + ")";}
}
    /**
     * Runs after a coordinator change: multicasts this member's counter state, collects every
     * other member's state, keeps the highest version seen for each counter, then asks members
     * to resend requests that were pending during the reconciliation.
     */
    protected class ReconciliationTask implements Runnable {
        protected ResponseCollector<ReconcileResponse> responses;
        public void run() {
            try {
                _run();
            }
            finally {
                // Re-enable normal request processing even if reconciliation failed.
                discard_requests=false;
            }
            Request req=new ResendPendingRequests();
            sendRequest(null, req);
        }
        protected void _run() {
            // Snapshot our counters into parallel arrays (name, value, version).
            Map<String,VersionedValue> copy=new HashMap<>(counters);
            int len=copy.size();
            String[] names=new String[len];
            long[] values=new long[len], versions=new long[len];
            int index=0;
            for(Map.Entry<String,VersionedValue> entry: copy.entrySet()) {
                names[index]=entry.getKey();
                values[index]=entry.getValue().value;
                versions[index]=entry.getValue().version;
                index++;
            }
            List<Address> targets=new ArrayList<>(view.getMembers());
            targets.remove(local_addr);
            responses=new ResponseCollector<>(targets); // send to everyone but us
            Request req=new ReconcileRequest(names, values, versions);
            sendRequest(null, req);
            // Block until all members replied or the reconciliation timeout expired.
            responses.waitForAllResponses(reconciliation_timeout);
            Map<Address,ReconcileResponse> reconcile_results=responses.getResults();
            for(Map.Entry<Address,ReconcileResponse> entry: reconcile_results.entrySet()) {
                if(entry.getKey().equals(local_addr))
                    continue;
                ReconcileResponse rsp=entry.getValue();
                if(rsp != null && rsp.names != null) {
                    for(int i=0; i < rsp.names.length; i++) {
                        String counter_name=rsp.names[i];
                        long version=rsp.versions[i];
                        long value=rsp.values[i];
                        VersionedValue my_value=counters.get(counter_name);
                        if(my_value == null) {
                            // Unknown counter: adopt the remote value/version as-is.
                            counters.put(counter_name, new VersionedValue(value, version));
                            continue;
                        }
                        // updateIfBigger re-checks under the value's lock; this outer check
                        // just skips the call when the remote version is clearly not newer.
                        if(my_value.version < version)
                            my_value.updateIfBigger(value, version);
                    }
                }
            }
        }
        public void add(ReconcileResponse rsp, Address sender) {
            if(responses != null)
                responses.add(sender, rsp);
        }
        protected void cancel() {
            if(responses != null)
                responses.reset();
        }
        public String toString() {
            return COUNTER.class.getSimpleName() + ": " + getClass().getSimpleName();
        }
    }
}
| |
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.datetimepicker.time;
import com.android.datetimepicker.Utils;
import com.nineoldandroids.animation.AnimatorSet;
import com.nineoldandroids.animation.ObjectAnimator;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.Resources;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.view.AccessibilityDelegateCompat;
import android.support.v4.view.ViewCompat;
import android.support.v4.view.accessibility.AccessibilityManagerCompat;
import android.support.v4.view.accessibility.AccessibilityNodeInfoCompat;
import android.text.format.DateUtils;
import android.text.format.Time;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.accessibility.AccessibilityEvent;
import android.view.accessibility.AccessibilityManager;
import android.view.accessibility.AccessibilityNodeInfo;
import android.widget.FrameLayout;
import com.android.datetimepicker.HapticFeedbackController;
import com.android.datetimepicker.R;
import com.nineoldandroids.view.ViewHelper;
import java.util.ArrayList;
import java.util.List;
/**
 * The primary layout to hold the circular picker, and the am/pm buttons. This view will measure
 * itself to end up as a square. It also handles touches to be passed in to views that need to know
 * when they've been touched.
 */
public class RadialPickerLayout extends FrameLayout implements OnTouchListener {
    private static final String TAG = "RadialPickerLayout";
    // Touch thresholds, initialized from ViewConfiguration in the constructor.
    private final int TOUCH_SLOP;
    private final int TAP_TIMEOUT;
    // 12 visible positions on the dial, 30 degrees apart; minutes advance 6 degrees each.
    private static final int VISIBLE_DEGREES_STEP_SIZE = 30;
    private static final int HOUR_VALUE_TO_DEGREES_STEP_SIZE = VISIBLE_DEGREES_STEP_SIZE;
    private static final int MINUTE_VALUE_TO_DEGREES_STEP_SIZE = 6;
    // Aliases for the dialog's picker-index and AM/PM constants.
    private static final int HOUR_INDEX = TimePickerDialog.HOUR_INDEX;
    private static final int MINUTE_INDEX = TimePickerDialog.MINUTE_INDEX;
    private static final int AMPM_INDEX = TimePickerDialog.AMPM_INDEX;
    private static final int ENABLE_PICKER_INDEX = TimePickerDialog.ENABLE_PICKER_INDEX;
    private static final int AM = TimePickerDialog.AM;
    private static final int PM = TimePickerDialog.PM;
    private int mLastValueSelected;
    private HapticFeedbackController mHapticFeedbackController;
    private OnValueSelectedListener mListener;
    // Guard: initialize() must run exactly once before the view is usable.
    private boolean mTimeInitialized;
    private int mCurrentHoursOfDay;
    private int mCurrentMinutes;
    private boolean mIs24HourMode;
    private boolean mHideAmPm;
    private int mCurrentItemShowing;
    // Child views composing the dial: background circle, AM/PM buttons, number rings, selectors.
    private CircleView mCircleView;
    private AmPmCirclesView mAmPmCirclesView;
    private RadialTextsView mHourRadialTextsView;
    private RadialTextsView mMinuteRadialTextsView;
    private RadialSelectorView mHourRadialSelectorView;
    private RadialSelectorView mMinuteRadialSelectorView;
    // Overlay shown when input is disabled.
    private View mGrayBox;
    // Maps raw touch degrees (0-360) to snapped selectable degrees; built by preparePrefer30sMap().
    private int[] mSnapPrefer30sMap;
    private boolean mInputEnabled;
    private int mIsTouchingAmOrPm = -1;
    // Touch-gesture state captured on ACTION_DOWN.
    private boolean mDoingMove;
    private boolean mDoingTouch;
    private int mDownDegrees;
    private float mDownX;
    private float mDownY;
    private AccessibilityManager mAccessibilityManager;
    private AnimatorSet mTransition;
    private Handler mHandler = new Handler();
public interface OnValueSelectedListener {
void onValueSelected(int pickerIndex, int newValue, boolean autoAdvance);
}
    /**
     * Builds the view hierarchy (circle, AM/PM circles, hour/minute text rings and selectors,
     * disabled-state overlay); the children are configured later by initialize().
     */
    public RadialPickerLayout(Context context, AttributeSet attrs) {
        super(context, attrs);
        setOnTouchListener(this);
        ViewConfiguration vc = ViewConfiguration.get(context);
        TOUCH_SLOP = vc.getScaledTouchSlop();
        TAP_TIMEOUT = ViewConfiguration.getTapTimeout();
        mDoingMove = false;
        mCircleView = new CircleView(context);
        addView(mCircleView);
        mAmPmCirclesView = new AmPmCirclesView(context);
        addView(mAmPmCirclesView);
        mHourRadialTextsView = new RadialTextsView(context);
        addView(mHourRadialTextsView);
        mMinuteRadialTextsView = new RadialTextsView(context);
        addView(mMinuteRadialTextsView);
        mHourRadialSelectorView = new RadialSelectorView(context);
        addView(mHourRadialSelectorView);
        mMinuteRadialSelectorView = new RadialSelectorView(context);
        addView(mMinuteRadialSelectorView);
        // Prepare mapping to snap touchable degrees to selectable degrees.
        preparePrefer30sMap();
        mLastValueSelected = -1;
        mInputEnabled = true;
        // Semi-transparent overlay shown (made visible elsewhere) when input is disabled.
        mGrayBox = new View(context);
        mGrayBox.setLayoutParams(new ViewGroup.LayoutParams(
                ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
        mGrayBox.setBackgroundColor(getResources().getColor(R.color.transparent_black));
        mGrayBox.setVisibility(View.INVISIBLE);
        addView(mGrayBox);
        mAccessibilityManager = (AccessibilityManager) context.getSystemService(Context.ACCESSIBILITY_SERVICE);
        mTimeInitialized = false;
        installAccessibilityDelegate();
    }
    /** Registers a delegate that advertises scroll-forward/backward accessibility actions. */
    private void installAccessibilityDelegate() {
        ViewCompat.setAccessibilityDelegate(this, new AccessibilityDelegateCompat() {
            /**
             * Necessary for accessibility, to ensure we support "scrolling" forward and backward
             * in the circle.
             */
            @Override
            public void onInitializeAccessibilityNodeInfo(View host,
                    AccessibilityNodeInfoCompat info) {
                super.onInitializeAccessibilityNodeInfo(host, info);
                info.addAction(AccessibilityNodeInfo.ACTION_SCROLL_FORWARD);
                info.addAction(AccessibilityNodeInfo.ACTION_SCROLL_BACKWARD);
            }
        });
    }
/**
* Measure the view to end up as a square, based on the minimum of the height and width.
*/
@Override
public void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
int measuredWidth = MeasureSpec.getSize(widthMeasureSpec);
int widthMode = MeasureSpec.getMode(widthMeasureSpec);
int measuredHeight = MeasureSpec.getSize(heightMeasureSpec);
int heightMode = MeasureSpec.getMode(heightMeasureSpec);
int minDimension = Math.min(measuredWidth, measuredHeight);
super.onMeasure(MeasureSpec.makeMeasureSpec(minDimension, widthMode),
MeasureSpec.makeMeasureSpec(minDimension, heightMode));
}
    /** Sets the listener notified when the user selects a new hour/minute/AM-PM value. */
    public void setOnValueSelectedListener(OnValueSelectedListener listener) {
        mListener = listener;
    }
    /**
     * Initialize the Layout with starting values. May be called only once; subsequent calls
     * log an error and return.
     * @param context the context
     * @param hapticFeedbackController controller used for haptic feedback on selection
     * @param initialHoursOfDay initial hour of day in the 0-23 range (split into AM/PM below)
     * @param initialMinutes initial minute (0-59)
     * @param is24HourMode whether to show a 24-hour dial (two rings, no AM/PM circles)
     */
    public void initialize(Context context, HapticFeedbackController hapticFeedbackController,
            int initialHoursOfDay, int initialMinutes, boolean is24HourMode) {
        if (mTimeInitialized) {
            Log.e(TAG, "Time has already been initialized.");
            return;
        }
        mHapticFeedbackController = hapticFeedbackController;
        mIs24HourMode = is24HourMode;
        // With touch exploration on, AM/PM circles are always hidden (selected another way).
        mHideAmPm = AccessibilityManagerCompat.isTouchExplorationEnabled(mAccessibilityManager) ? true : mIs24HourMode;
        // Initialize the circle and AM/PM circles if applicable.
        mCircleView.initialize(context, mHideAmPm);
        mCircleView.invalidate();
        if (!mHideAmPm) {
            mAmPmCirclesView.initialize(context, initialHoursOfDay < 12? AM : PM);
            mAmPmCirclesView.invalidate();
        }
        // Initialize the hours and minutes numbers.
        Resources res = context.getResources();
        // In 24-hour mode the outer ring shows 0/13-23 and the inner ring shows 12/1-11.
        int[] hours = {12, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
        int[] hours_24 = {0, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23};
        int[] minutes = {0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55};
        String[] hoursTexts = new String[12];
        String[] innerHoursTexts = new String[12];
        String[] minutesTexts = new String[12];
        for (int i = 0; i < 12; i++) {
            hoursTexts[i] = is24HourMode?
                    String.format("%02d", hours_24[i]) : String.format("%d", hours[i]);
            innerHoursTexts[i] = String.format("%d", hours[i]);
            minutesTexts[i] = String.format("%02d", minutes[i]);
        }
        mHourRadialTextsView.initialize(res,
                hoursTexts, (is24HourMode? innerHoursTexts : null), mHideAmPm, true);
        mHourRadialTextsView.invalidate();
        mMinuteRadialTextsView.initialize(res, minutesTexts, null, mHideAmPm, false);
        mMinuteRadialTextsView.invalidate();
        // Initialize the currently-selected hour and minute.
        setValueForItem(HOUR_INDEX, initialHoursOfDay);
        setValueForItem(MINUTE_INDEX, initialMinutes);
        int hourDegrees = (initialHoursOfDay % 12) * HOUR_VALUE_TO_DEGREES_STEP_SIZE;
        mHourRadialSelectorView.initialize(context, mHideAmPm, is24HourMode, true,
                hourDegrees, isHourInnerCircle(initialHoursOfDay));
        int minuteDegrees = initialMinutes * MINUTE_VALUE_TO_DEGREES_STEP_SIZE;
        mMinuteRadialSelectorView.initialize(context, mHideAmPm, false, false,
                minuteDegrees, false);
        mTimeInitialized = true;
    }
    /** Propagates the dark/light theme flag to every themed child view. */
    /* package */ public void setTheme(Context context, boolean themeDark) {
        mCircleView.setTheme(context, themeDark);
        mAmPmCirclesView.setTheme(context, themeDark);
        mHourRadialTextsView.setTheme(context, themeDark);
        mMinuteRadialTextsView.setTheme(context, themeDark);
        mHourRadialSelectorView.setTheme(context, themeDark);
        mMinuteRadialSelectorView.setTheme(context, themeDark);
    }
    /** Sets both the hour and minute values and moves the corresponding selectors. */
    public void setTime(int hours, int minutes) {
        setItem(HOUR_INDEX, hours);
        setItem(MINUTE_INDEX, minutes);
    }
/**
* Set either the hour or the minute. Will set the internal value, and set the selection.
*/
private void setItem(int index, int value) {
if (index == HOUR_INDEX) {
setValueForItem(HOUR_INDEX, value);
int hourDegrees = (value % 12) * HOUR_VALUE_TO_DEGREES_STEP_SIZE;
mHourRadialSelectorView.setSelection(hourDegrees, isHourInnerCircle(value), false);
mHourRadialSelectorView.invalidate();
} else if (index == MINUTE_INDEX) {
setValueForItem(MINUTE_INDEX, value);
int minuteDegrees = value * MINUTE_VALUE_TO_DEGREES_STEP_SIZE;
mMinuteRadialSelectorView.setSelection(minuteDegrees, false, false);
mMinuteRadialSelectorView.invalidate();
}
}
/**
* Check if a given hour appears in the outer circle or the inner circle
* @return true if the hour is in the inner circle, false if it's in the outer circle.
*/
private boolean isHourInnerCircle(int hourOfDay) {
// We'll have the 00 hours on the outside circle.
return mIs24HourMode && (hourOfDay <= 12 && hourOfDay != 0);
}
    /** Returns the currently-set hour of day. */
    public int getHours() {
        return mCurrentHoursOfDay;
    }
    /** Returns the currently-set minute. */
    public int getMinutes() {
        return mCurrentMinutes;
    }
/**
* If the hours are showing, return the current hour. If the minutes are showing, return the
* current minute.
*/
private int getCurrentlyShowingValue() {
int currentIndex = getCurrentItemShowing();
if (currentIndex == HOUR_INDEX) {
return mCurrentHoursOfDay;
} else if (currentIndex == MINUTE_INDEX) {
return mCurrentMinutes;
} else {
return -1;
}
}
public int getIsCurrentlyAmOrPm() {
if (mCurrentHoursOfDay < 12) {
return AM;
} else if (mCurrentHoursOfDay < 24) {
return PM;
}
return -1;
}
/**
* Set the internal value for the hour, minute, or AM/PM.
*/
private void setValueForItem(int index, int value) {
if (index == HOUR_INDEX) {
mCurrentHoursOfDay = value;
} else if (index == MINUTE_INDEX){
mCurrentMinutes = value;
} else if (index == AMPM_INDEX) {
if (value == AM) {
mCurrentHoursOfDay = mCurrentHoursOfDay % 12;
} else if (value == PM) {
mCurrentHoursOfDay = (mCurrentHoursOfDay % 12) + 12;
}
}
}
    /**
     * Set the internal value as either AM or PM, and update the AM/PM circle displays.
     * @param amOrPm one of the AM/PM constants
     */
    public void setAmOrPm(int amOrPm) {
        mAmPmCirclesView.setAmOrPm(amOrPm);
        mAmPmCirclesView.invalidate();
        setValueForItem(AMPM_INDEX, amOrPm);
    }
    /**
     * Split up the 360 degrees of the circle among the 60 selectable values. Assigns a larger
     * selectable area to each of the 12 visible values, such that the ratio of space apportioned
     * to a visible value : space apportioned to a non-visible value will be 14 : 4.
     * E.g. the output of 30 degrees should have a higher range of input associated with it than
     * the output of 24 degrees, because 30 degrees corresponds to a visible number on the clock
     * circle (5 on the minutes, 1 or 13 on the hours).
     */
    private void preparePrefer30sMap() {
        // We'll split up the visible output and the non-visible output such that each visible
        // output will correspond to a range of 14 associated input degrees, and each non-visible
        // output will correspond to a range of 4 associate input degrees, so visible numbers
        // are more than 3 times easier to get than non-visible numbers:
        // {354-359,0-7}:0, {8-11}:6, {12-15}:12, {16-19}:18, {20-23}:24, {24-37}:30, etc.
        //
        // If an output of 30 degrees should correspond to a range of 14 associated degrees, then
        // we'll need any input between 24 - 37 to snap to 30. Working out from there, 20-23 should
        // snap to 24, while 38-41 should snap to 36. This is somewhat counter-intuitive, that you
        // can be touching 36 degrees but have the selection snapped to 30 degrees; however, this
        // inconsistency isn't noticeable at such fine-grained degrees, and it affords us the
        // ability to aggressively prefer the visible values by a factor of more than 3:1, which
        // greatly contributes to the selectability of these values.
        // Our input will be 0 through 360.
        mSnapPrefer30sMap = new int[361];
        // The first output is 0, and each following output will increment by 6 {0, 6, 12, ...}.
        int snappedOutputDegrees = 0;
        // Count of how many inputs we've designated to the specified output.
        int count = 1;
        // How many input we expect for a specified output. This will be 14 for output divisible
        // by 30, and 4 for the remaining output. We'll special case the outputs of 0 and 360, so
        // the caller can decide which they need. (0 gets inputs 0-7 here; the 354-360 end of its
        // wrap-around range is covered by the 360 special case below.)
        int expectedCount = 8;
        // Iterate through the input.
        for (int degrees = 0; degrees < 361; degrees++) {
            // Save the input-output mapping.
            mSnapPrefer30sMap[degrees] = snappedOutputDegrees;
            // If this is the last input for the specified output, calculate the next output and
            // the next expected count.
            if (count == expectedCount) {
                snappedOutputDegrees += 6;
                if (snappedOutputDegrees == 360) {
                    expectedCount = 7;
                } else if (snappedOutputDegrees % 30 == 0) {
                    expectedCount = 14;
                } else {
                    expectedCount = 4;
                }
                count = 1;
            } else {
                count++;
            }
        }
    }
/**
* Returns mapping of any input degrees (0 to 360) to one of 60 selectable output degrees,
* where the degrees corresponding to visible numbers (i.e. those divisible by 30) will be
* weighted heavier than the degrees corresponding to non-visible numbers.
* See {@link #preparePrefer30sMap()} documentation for the rationale and generation of the
* mapping.
*/
private int snapPrefer30s(int degrees) {
if (mSnapPrefer30sMap == null) {
return -1;
}
return mSnapPrefer30sMap[degrees];
}
/**
* Returns mapping of any input degrees (0 to 360) to one of 12 visible output degrees (all
* multiples of 30), where the input will be "snapped" to the closest visible degrees.
* @param degrees The input degrees
* @param forceHigherOrLower The output may be forced to either the higher or lower step, or may
* be allowed to snap to whichever is closer. Use 1 to force strictly higher, -1 to force
* strictly lower, and 0 to snap to the closer one.
* @return output degrees, will be a multiple of 30
*/
private static int snapOnly30s(int degrees, int forceHigherOrLower) {
int stepSize = HOUR_VALUE_TO_DEGREES_STEP_SIZE;
int floor = (degrees / stepSize) * stepSize;
int ceiling = floor + stepSize;
if (forceHigherOrLower == 1) {
degrees = ceiling;
} else if (forceHigherOrLower == -1) {
if (degrees == floor) {
floor -= stepSize;
}
degrees = floor;
} else {
if ((degrees - floor) < (ceiling - degrees)) {
degrees = floor;
} else {
degrees = ceiling;
}
}
return degrees;
}
    /**
     * For the currently showing view (either hours or minutes), re-calculate the position for the
     * selector, and redraw it at that position. The input degrees will be snapped to a selectable
     * value.
     * @param degrees Degrees which should be selected.
     * @param isInnerCircle Whether the selection should be in the inner circle; will be ignored
     * if there is no inner circle.
     * @param forceToVisibleValue Even if the currently-showing circle allows for fine-grained
     * selection (i.e. minutes), force the selection to one of the visibly-showing values.
     * @param forceDrawDot The dot in the circle will generally only be shown when the selection
     * is on non-visible values, but use this to force the dot to be shown.
     * @return The value that was selected, i.e. 0-23 for hours, 0-59 for minutes.
     */
    private int reselectSelector(int degrees, boolean isInnerCircle,
            boolean forceToVisibleValue, boolean forceDrawDot) {
        if (degrees == -1) {
            // -1 is the "no legal selection" sentinel (see getDegreesFromCoords()).
            return -1;
        }
        int currentShowing = getCurrentItemShowing();
        int stepSize;
        // Fine-grained (6-degree) selection only applies to the minutes circle, and only
        // when the caller has not forced snapping to the visible values.
        boolean allowFineGrained = !forceToVisibleValue && (currentShowing == MINUTE_INDEX);
        if (allowFineGrained) {
            // Weighted snap: inputs near visible numbers get a wider capture range.
            degrees = snapPrefer30s(degrees);
        } else {
            // Hard snap to a multiple of 30 (one of the 12 visible positions).
            degrees = snapOnly30s(degrees, 0);
        }
        RadialSelectorView radialSelectorView;
        if (currentShowing == HOUR_INDEX) {
            radialSelectorView = mHourRadialSelectorView;
            stepSize = HOUR_VALUE_TO_DEGREES_STEP_SIZE;
        } else {
            radialSelectorView = mMinuteRadialSelectorView;
            stepSize = MINUTE_VALUE_TO_DEGREES_STEP_SIZE;
        }
        // Move and redraw the selector at the snapped position.
        radialSelectorView.setSelection(degrees, isInnerCircle, forceDrawDot);
        radialSelectorView.invalidate();
        // Normalize the 0/360 wrap-around so the division below yields the intended
        // value: on the hours circle "0 degrees" displays as 12 (split across the
        // inner/outer circles in 24-hour mode); on the minutes circle 360 wraps to 0.
        if (currentShowing == HOUR_INDEX) {
            if (mIs24HourMode) {
                if (degrees == 0 && isInnerCircle) {
                    degrees = 360;
                } else if (degrees == 360 && !isInnerCircle) {
                    degrees = 0;
                }
            } else if (degrees == 0) {
                degrees = 360;
            }
        } else if (degrees == 360 && currentShowing == MINUTE_INDEX) {
            degrees = 0;
        }
        int value = degrees / stepSize;
        // In 24-hour mode the outer circle carries the 12-23 range (except 0 itself).
        if (currentShowing == HOUR_INDEX && mIs24HourMode && !isInnerCircle && degrees != 0) {
            value += 12;
        }
        return value;
    }
/**
* Calculate the degrees within the circle that corresponds to the specified coordinates, if
* the coordinates are within the range that will trigger a selection.
* @param pointX The x coordinate.
* @param pointY The y coordinate.
* @param forceLegal Force the selection to be legal, regardless of how far the coordinates are
* from the actual numbers.
* @param isInnerCircle If the selection may be in the inner circle, pass in a size-1 boolean
* array here, inside which the value will be true if the selection is in the inner circle,
* and false if in the outer circle.
* @return Degrees from 0 to 360, if the selection was within the legal range. -1 if not.
*/
private int getDegreesFromCoords(float pointX, float pointY, boolean forceLegal,
final Boolean[] isInnerCircle) {
int currentItem = getCurrentItemShowing();
if (currentItem == HOUR_INDEX) {
return mHourRadialSelectorView.getDegreesFromCoords(
pointX, pointY, forceLegal, isInnerCircle);
} else if (currentItem == MINUTE_INDEX) {
return mMinuteRadialSelectorView.getDegreesFromCoords(
pointX, pointY, forceLegal, isInnerCircle);
} else {
return -1;
}
}
/**
* Get the item (hours or minutes) that is currently showing.
*/
public int getCurrentItemShowing() {
if (mCurrentItemShowing != HOUR_INDEX && mCurrentItemShowing != MINUTE_INDEX) {
Log.e(TAG, "Current item showing was unfortunately set to "+mCurrentItemShowing);
return -1;
}
return mCurrentItemShowing;
}
/**
* Set either minutes or hours as showing.
* @param animate True to animate the transition, false to show with no animation.
*/
public void setCurrentItemShowing(int index, boolean animate) {
if (index != HOUR_INDEX && index != MINUTE_INDEX) {
Log.e(TAG, "TimePicker does not support view at index "+index);
return;
}
int lastIndex = getCurrentItemShowing();
mCurrentItemShowing = index;
if (animate && (index != lastIndex)) {
List<ObjectAnimator> anims = new ArrayList<ObjectAnimator>();
if (index == MINUTE_INDEX) {
anims.addAll(mHourRadialTextsView.getDisappearAnimator());
anims.addAll(mHourRadialSelectorView.getDisappearAnimator());
anims.addAll(mMinuteRadialTextsView.getReappearAnimator());
anims.addAll(mMinuteRadialSelectorView.getReappearAnimator());
} else if (index == HOUR_INDEX){
anims.addAll(mHourRadialTextsView.getReappearAnimator());
anims.addAll(mHourRadialSelectorView.getReappearAnimator());
anims.addAll(mMinuteRadialTextsView.getDisappearAnimator());
anims.addAll(mMinuteRadialSelectorView.getDisappearAnimator());
}
if (mTransition != null && mTransition.isRunning()) {
mTransition.end();
}
mTransition = new AnimatorSet();
mTransition.playTogether(anims.toArray(new ObjectAnimator[anims.size()]));
mTransition.start();
} else {
int hourAlpha = (index == HOUR_INDEX) ? 255 : 0;
int minuteAlpha = (index == MINUTE_INDEX) ? 255 : 0;
Log.i(TAG, "set current item showing, no animate, hour alpha:" + hourAlpha + ", min alpha:" + minuteAlpha);
ViewHelper.setAlpha(mHourRadialTextsView, hourAlpha);
ViewHelper.setAlpha(mHourRadialSelectorView, hourAlpha);
ViewHelper.setAlpha(mMinuteRadialTextsView, minuteAlpha);
ViewHelper.setAlpha(mMinuteRadialSelectorView, minuteAlpha);
}
}
    /**
     * Handles the full touch lifecycle for the radial picker: a down either begins an
     * AM/PM press or a (possibly dragged) selection on the circle; moves drag the
     * selector; an up commits the selection and notifies the listener.
     */
    @Override
    public boolean onTouch(View v, MotionEvent event) {
        final float eventX = event.getX();
        final float eventY = event.getY();
        int degrees;
        int value;
        // Out-parameter filled in by getDegreesFromCoords(): [0] is true when the hit
        // lands on the inner circle.
        final Boolean[] isInnerCircle = new Boolean[1];
        isInnerCircle[0] = false;
        switch(event.getAction()) {
            case MotionEvent.ACTION_DOWN:
                if (!mInputEnabled) {
                    // Swallow the gesture entirely while input is disabled.
                    return true;
                }
                mDownX = eventX;
                mDownY = eventY;
                mLastValueSelected = -1;
                mDoingMove = false;
                mDoingTouch = true;
                // If we're showing the AM/PM, check to see if the user is touching it.
                if (!mHideAmPm) {
                    mIsTouchingAmOrPm = mAmPmCirclesView.getIsTouchingAmOrPm(eventX, eventY);
                } else {
                    mIsTouchingAmOrPm = -1;
                }
                if (mIsTouchingAmOrPm == AM || mIsTouchingAmOrPm == PM) {
                    // If the touch is on AM or PM, set it as "touched" after the TAP_TIMEOUT
                    // in case the user moves their finger quickly.
                    mHapticFeedbackController.tryVibrate();
                    mDownDegrees = -1;
                    mHandler.postDelayed(new Runnable() {
                        @Override
                        public void run() {
                            mAmPmCirclesView.setAmOrPmPressed(mIsTouchingAmOrPm);
                            mAmPmCirclesView.invalidate();
                        }
                    }, TAP_TIMEOUT);
                } else {
                    // If we're in accessibility mode, force the touch to be legal. Otherwise,
                    // it will only register within the given touch target zone.
                    boolean forceLegal = AccessibilityManagerCompat.isTouchExplorationEnabled(mAccessibilityManager);
                    // Calculate the degrees that is currently being touched.
                    mDownDegrees = getDegreesFromCoords(eventX, eventY, forceLegal, isInnerCircle);
                    if (mDownDegrees != -1) {
                        // If it's a legal touch, set that number as "selected" after the
                        // TAP_TIMEOUT in case the user moves their finger quickly.
                        mHapticFeedbackController.tryVibrate();
                        mHandler.postDelayed(new Runnable() {
                            @Override
                            public void run() {
                                mDoingMove = true;
                                int value = reselectSelector(mDownDegrees, isInnerCircle[0],
                                        false, true);
                                mLastValueSelected = value;
                                mListener.onValueSelected(getCurrentItemShowing(), value, false);
                            }
                        }, TAP_TIMEOUT);
                    }
                }
                return true;
            case MotionEvent.ACTION_MOVE:
                if (!mInputEnabled) {
                    // We shouldn't be in this state, because input is disabled.
                    Log.e(TAG, "Input was disabled, but received ACTION_MOVE.");
                    return true;
                }
                float dY = Math.abs(eventY - mDownY);
                float dX = Math.abs(eventX - mDownX);
                if (!mDoingMove && dX <= TOUCH_SLOP && dY <= TOUCH_SLOP) {
                    // Hasn't registered down yet, just slight, accidental movement of finger.
                    break;
                }
                // If we're in the middle of touching down on AM or PM, check if we still are.
                // If so, no-op. If not, remove its pressed state. Either way, no need to check
                // for touches on the other circle.
                if (mIsTouchingAmOrPm == AM || mIsTouchingAmOrPm == PM) {
                    // Cancel the delayed "pressed" runnable scheduled on ACTION_DOWN.
                    mHandler.removeCallbacksAndMessages(null);
                    int isTouchingAmOrPm = mAmPmCirclesView.getIsTouchingAmOrPm(eventX, eventY);
                    if (isTouchingAmOrPm != mIsTouchingAmOrPm) {
                        mAmPmCirclesView.setAmOrPmPressed(-1);
                        mAmPmCirclesView.invalidate();
                        mIsTouchingAmOrPm = -1;
                    }
                    break;
                }
                if (mDownDegrees == -1) {
                    // Original down was illegal, so no movement will register.
                    break;
                }
                // We're doing a move along the circle, so move the selection as appropriate.
                mDoingMove = true;
                mHandler.removeCallbacksAndMessages(null);
                degrees = getDegreesFromCoords(eventX, eventY, true, isInnerCircle);
                if (degrees != -1) {
                    value = reselectSelector(degrees, isInnerCircle[0], false, true);
                    if (value != mLastValueSelected) {
                        // Only vibrate/notify when the selection actually changes.
                        mHapticFeedbackController.tryVibrate();
                        mLastValueSelected = value;
                        mListener.onValueSelected(getCurrentItemShowing(), value, false);
                    }
                }
                return true;
            case MotionEvent.ACTION_UP:
                if (!mInputEnabled) {
                    // If our touch input was disabled, tell the listener to re-enable us.
                    Log.d(TAG, "Input was disabled, but received ACTION_UP.");
                    mListener.onValueSelected(ENABLE_PICKER_INDEX, 1, false);
                    return true;
                }
                mHandler.removeCallbacksAndMessages(null);
                mDoingTouch = false;
                // If we're touching AM or PM, set it as selected, and tell the listener.
                if (mIsTouchingAmOrPm == AM || mIsTouchingAmOrPm == PM) {
                    int isTouchingAmOrPm = mAmPmCirclesView.getIsTouchingAmOrPm(eventX, eventY);
                    mAmPmCirclesView.setAmOrPmPressed(-1);
                    mAmPmCirclesView.invalidate();
                    // Only commit if the finger lifted on the same target it pressed.
                    if (isTouchingAmOrPm == mIsTouchingAmOrPm) {
                        mAmPmCirclesView.setAmOrPm(isTouchingAmOrPm);
                        if (getIsCurrentlyAmOrPm() != isTouchingAmOrPm) {
                            mListener.onValueSelected(AMPM_INDEX, mIsTouchingAmOrPm, false);
                            setValueForItem(AMPM_INDEX, isTouchingAmOrPm);
                        }
                    }
                    mIsTouchingAmOrPm = -1;
                    break;
                }
                // If we have a legal degrees selected, set the value and tell the listener.
                if (mDownDegrees != -1) {
                    degrees = getDegreesFromCoords(eventX, eventY, mDoingMove, isInnerCircle);
                    if (degrees != -1) {
                        value = reselectSelector(degrees, isInnerCircle[0], !mDoingMove, false);
                        if (getCurrentItemShowing() == HOUR_INDEX && !mIs24HourMode) {
                            // Map the 1-12 circle value to a 0-23 hour using AM/PM.
                            int amOrPm = getIsCurrentlyAmOrPm();
                            if (amOrPm == AM && value == 12) {
                                value = 0;
                            } else if (amOrPm == PM && value != 12) {
                                value += 12;
                            }
                        }
                        setValueForItem(getCurrentItemShowing(), value);
                        mListener.onValueSelected(getCurrentItemShowing(), value, true);
                    }
                }
                mDoingMove = false;
                return true;
            default:
                break;
        }
        return false;
    }
/**
* Set touch input as enabled or disabled, for use with keyboard mode.
*/
public boolean trySettingInputEnabled(boolean inputEnabled) {
if (mDoingTouch && !inputEnabled) {
// If we're trying to disable input, but we're in the middle of a touch event,
// we'll allow the touch event to continue before disabling input.
return false;
}
mInputEnabled = inputEnabled;
mGrayBox.setVisibility(inputEnabled ? View.INVISIBLE : View.VISIBLE);
return true;
}
/**
* Announce the currently-selected time when launched.
*/
@Override
public boolean dispatchPopulateAccessibilityEvent(AccessibilityEvent event) {
if (event.getEventType() == AccessibilityEvent.TYPE_WINDOW_STATE_CHANGED) {
// Clear the event's current text so that only the current time will be spoken.
event.getText().clear();
Time time = new Time();
time.hour = getHours();
time.minute = getMinutes();
long millis = time.normalize(true);
int flags = DateUtils.FORMAT_SHOW_TIME;
if (mIs24HourMode) {
flags |= DateUtils.FORMAT_24HOUR;
}
String timeString = DateUtils.formatDateTime(getContext(), millis, flags);
event.getText().add(timeString);
return true;
}
return super.dispatchPopulateAccessibilityEvent(event);
}
    /**
     * When scroll forward/backward events are received, jump the time to the higher/lower
     * discrete, visible value on the circle.
     */
    @SuppressLint("NewApi")
    @Override
    public boolean performAccessibilityAction(int action, Bundle arguments) {
        if (super.performAccessibilityAction(action, arguments)) {
            return true;
        }
        // +1 steps the time forward, -1 steps it backward, 0 means "not a scroll action".
        int changeMultiplier = 0;
        if (action == AccessibilityNodeInfo.ACTION_SCROLL_FORWARD) {
            changeMultiplier = 1;
        } else if (action == AccessibilityNodeInfo.ACTION_SCROLL_BACKWARD) {
            changeMultiplier = -1;
        }
        if (changeMultiplier != 0) {
            int value = getCurrentlyShowingValue();
            int stepSize = 0;
            int currentItemShowing = getCurrentItemShowing();
            if (currentItemShowing == HOUR_INDEX) {
                stepSize = HOUR_VALUE_TO_DEGREES_STEP_SIZE;
                // Collapse the hour onto the 12 positions of the circle.
                // NOTE(review): in 24-hour mode this drops the 12-hour offset before
                // snapping and never restores it, so stepping forward from 23 appears
                // to land on 12 rather than wrapping to 0 — confirm this is intended.
                value %= 12;
            } else if (currentItemShowing == MINUTE_INDEX) {
                stepSize = MINUTE_VALUE_TO_DEGREES_STEP_SIZE;
            }
            // Convert to degrees, snap strictly up/down to the next visible mark,
            // then convert back to a value.
            int degrees = value * stepSize;
            degrees = snapOnly30s(degrees, changeMultiplier);
            value = degrees / stepSize;
            int maxValue = 0;
            int minValue = 0;
            if (currentItemShowing == HOUR_INDEX) {
                if (mIs24HourMode) {
                    maxValue = 23;
                } else {
                    maxValue = 12;
                    minValue = 1;
                }
            } else {
                // Minutes snap to multiples of 5; the highest visible mark is 55.
                maxValue = 55;
            }
            if (value > maxValue) {
                // If we scrolled forward past the highest number, wrap around to the lowest.
                value = minValue;
            } else if (value < minValue) {
                // If we scrolled backward past the lowest number, wrap around to the highest.
                value = maxValue;
            }
            setItem(currentItemShowing, value);
            mListener.onValueSelected(currentItemShowing, value, false);
            return true;
        }
        return false;
    }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.resolve;
import com.intellij.navigation.NavigationItem;
import com.intellij.openapi.util.RecursionManager;
import com.intellij.psi.*;
import com.intellij.psi.infos.MethodCandidateInfo;
import com.intellij.psi.util.PsiTreeUtil;
import org.jetbrains.annotations.NonNls;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.junit.Assert.assertThat;
/**
* @author dsl
*/
public class ResolveMethod15Test extends Resolve15TestCase {
public void testStaticImportOnDemand() throws Exception {
final PsiReference ref = configureByFile();
final PsiElement element = ref.resolve();
assertNotNull(element);
assertThat(element, instanceOf(PsiMethod.class));
final PsiMethod method = (PsiMethod)element;
assertEquals("asList", method.getName());
assertEquals("java.util.Arrays", method.getContainingClass().getQualifiedName());
}
public void testStaticImportHidden() throws Exception {
final PsiJavaReference ref = (PsiJavaReference)configureByFile();
final JavaResolveResult result = ref.advancedResolve(false);
assertFalse(result.isValidResult());
final PsiElement element = result.getElement();
assertNotNull(element);
assertThat(element, instanceOf(PsiMethod.class));
final PsiMethod method = (PsiMethod)element;
assertEquals(CommonClassNames.JAVA_LANG_OBJECT, method.getContainingClass().getQualifiedName());
}
public void testStaticImportDirect() throws Exception {
final PsiReference ref = configureByFile();
final PsiElement element = ref.resolve();
assertNotNull(element);
assertThat(element, instanceOf(PsiMethod.class));
final PsiMethod method = (PsiMethod)element;
assertEquals("asList", method.getName());
assertEquals("java.util.Arrays", method.getContainingClass().getQualifiedName());
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final JavaResolveResult[] resolveResults = refExpr.multiResolve(false);
assertEquals(1, resolveResults.length);
final JavaResolveResult resolveResult = resolveResults[0];
assertTrue(resolveResult.isValidResult());
assertThat(resolveResult.getCurrentFileResolveScope(), instanceOf(PsiImportStaticStatement.class));
assertThat(resolveResult, instanceOf(MethodCandidateInfo.class));
final MethodCandidateInfo methodCandidateInfo = (MethodCandidateInfo)resolveResult;
assertTrue(methodCandidateInfo.isApplicable());
}
public void testStaticImportConflict() throws Exception {
final PsiReference ref = configureByFile();
final PsiElement element = ref.resolve();
assertNotNull(element);
assertThat(element, instanceOf(PsiMethod.class));
final PsiMethod method = (PsiMethod)element;
assertEquals("sort", method.getName());
assertEquals("java.util.Collections", method.getContainingClass().getQualifiedName());
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final JavaResolveResult[] resolveResults = refExpr.multiResolve(false);
assertEquals(1, resolveResults.length);
final JavaResolveResult resolveResult = resolveResults[0];
assertFalse(resolveResult.isValidResult());
assertThat(resolveResult.getCurrentFileResolveScope(), instanceOf(PsiImportStaticStatement.class));
assertThat(resolveResult, instanceOf(MethodCandidateInfo.class));
final MethodCandidateInfo methodCandidateInfo = (MethodCandidateInfo)resolveResult;
assertFalse(methodCandidateInfo.isApplicable());
}
public void testStaticImportConflict1() throws Exception {
final PsiJavaCodeReferenceElement ref = (PsiJavaCodeReferenceElement)configureByFile();
final JavaResolveResult result = ref.advancedResolve(false);
PsiElement element = result.getElement();
assertTrue(!result.isValidResult());
assertThat(element, instanceOf(PsiMethod.class));
final PsiMethod method = (PsiMethod)element;
PsiMethod parentMethod = PsiTreeUtil.getParentOfType(ref.getElement(), PsiMethod.class);
assertEquals(method, parentMethod);
}
public void testStaticImportConflict3() throws Exception {
final PsiJavaCodeReferenceElement ref = (PsiJavaCodeReferenceElement)configureByFile();
final JavaResolveResult result = ref.advancedResolve(false);
assertResolvesToMethodInClass(result, "ToImportX2");
}
public void testGenericsAndVarargsNoConflict() throws Exception {
final PsiReference ref = configureByFile();
final PsiElement element = ref.resolve();
assertNotNull(element);
assertThat(element, instanceOf(PsiMethod.class));
final PsiMethod method = (PsiMethod)element;
assertEquals("method", method.getName());
assertEquals(method.getTypeParameters().length, 0);
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final JavaResolveResult[] resolveResults = refExpr.multiResolve(false);
assertEquals(1, resolveResults.length);
final JavaResolveResult resolveResult = resolveResults[0];
assertTrue(resolveResult.isValidResult());
assertThat(resolveResult, instanceOf(MethodCandidateInfo.class));
final MethodCandidateInfo methodCandidateInfo = (MethodCandidateInfo)resolveResult;
assertTrue(methodCandidateInfo.isApplicable());
}
//JLS3 15.2.8 hack
public void testGetClass() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiType type = ((PsiExpression)refExpr.getParent()).getType();
assertEquals("java.lang.Class<? extends java.lang.String>", type.getCanonicalText());
}
public void testToString() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final PsiElement resolve = refExpr.resolve();
assertTrue(resolve != null ? resolve.toString() : null, resolve instanceof PsiMethod);
final PsiClass containingClass = ((PsiMethod)resolve).getContainingClass();
assertTrue(containingClass != null ? containingClass.getName() : null, containingClass instanceof PsiAnonymousClass);
}
public void testListEquals() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final PsiElement resolve = refExpr.resolve();
assertTrue(resolve != null ? resolve.toString() : null, resolve instanceof PsiMethod);
final PsiClass containingClass = ((PsiMethod)resolve).getContainingClass();
assertNotNull(containingClass);
assertTrue(containingClass.toString(), CommonClassNames.JAVA_UTIL_LIST.equals(containingClass.getQualifiedName()));
}
public void testCovariantReturnTypeAnonymous() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final PsiElement resolve = refExpr.resolve();
assertTrue(resolve != null ? resolve.toString() : null, resolve instanceof PsiMethod);
final PsiClass containingClass = ((PsiMethod)resolve).getContainingClass();
assertTrue(containingClass != null ? containingClass.getName() : null, !(containingClass instanceof PsiAnonymousClass));
}
public void testNonPublicAnonymous() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final PsiElement resolve = refExpr.resolve();
assertTrue(resolve != null ? resolve.toString() : null, resolve instanceof PsiMethod);
final PsiClass containingClass = ((PsiMethod)resolve).getContainingClass();
assertTrue(containingClass != null ? containingClass.getName() : null, !(containingClass instanceof PsiAnonymousClass));
}
public void testFilterFixedVsVarargs1() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiMethodCallExpression call = (PsiMethodCallExpression) refExpr.getParent();
assertNull(call.resolveMethod());
}
public void testFilterFixedVsVarargs2() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiMethodCallExpression call = (PsiMethodCallExpression) refExpr.getParent();
assertNull(call.resolveMethod());
}
public void testFilterFixedVsVarargs3() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiMethodCallExpression call = (PsiMethodCallExpression) refExpr.getParent();
assertNull(call.resolveMethod());
}
public void testFilterFixedVsVarargs4() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiMethodCallExpression call = (PsiMethodCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
assertNotNull(resolveResult.getElement());
assertTrue(resolveResult.isValidResult());
}
public void testFilterFixedVsVarargs5() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiMethodCallExpression call = (PsiMethodCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
PsiElement element = resolveResult.getElement();
assertNotNull(element);
assertTrue(resolveResult.isValidResult());
assertTrue(!((PsiMethod) element).isVarArgs());
}
public void testFilterFixedVsVarargs6() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiMethodCallExpression call = (PsiMethodCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
PsiElement element = resolveResult.getElement();
assertNotNull(element);
assertTrue(resolveResult.isValidResult());
assertTrue(((PsiMethod) element).isVarArgs());
}
public void testFilterFixedVsVarargs7() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiMethodCallExpression call = (PsiMethodCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
PsiElement element = resolveResult.getElement();
assertNotNull(element);
assertTrue(resolveResult.isValidResult());
assertTrue(!((PsiMethod) element).isVarArgs());
}
public void testFilterFixedVsVarargs8() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiCallExpression call = (PsiCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
PsiElement element = resolveResult.getElement();
assertNotNull(element);
assertTrue(resolveResult.isValidResult());
assertTrue(!((PsiMethod) element).isVarArgs());
}
public void testFilterFixedVsVarargs9() throws Exception {
RecursionManager.assertOnRecursionPrevention(getTestRootDisposable());
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiCallExpression call = (PsiCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
PsiElement element = resolveResult.getElement();
assertNotNull(element);
assertTrue(resolveResult.isValidResult());
assertTrue(((PsiMethod) element).isVarArgs());
}
public void testFilterBoxing1() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiCallExpression call = (PsiCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
PsiElement element = resolveResult.getElement();
assertNotNull(element);
assertTrue(resolveResult.isValidResult());
final PsiMethod method = (PsiMethod)element;
assertEquals(PsiType.BOOLEAN, method.getParameterList().getParameters()[1].getType());
}
public void testFilterVarargsVsVarargs1() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiCallExpression call = (PsiCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
PsiElement element = resolveResult.getElement();
assertNotNull(element);
assertTrue(resolveResult.isValidResult());
assertEquals(((PsiMethod)element).getParameterList().getParametersCount(), 3);
}
public void testFilterVarargsVsVarargs2() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiCallExpression call = (PsiCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
assertNull(resolveResult.getElement());
assertFalse(resolveResult.isValidResult());
final JavaResolveResult[] candidates = refExpr.multiResolve(false);
assertEquals(2, candidates.length);
}
public void testFilterVarargsVsVarargs3() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiCallExpression call = (PsiCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
assertNotNull(resolveResult.getElement());
assertFalse(resolveResult.isValidResult());
final JavaResolveResult[] candidates = refExpr.multiResolve(false);
assertEquals(1, candidates.length);
}
public void testFilterVarargsVsVarargs4() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
PsiCallExpression call = (PsiCallExpression) refExpr.getParent();
JavaResolveResult resolveResult = call.resolveMethodGenerics();
assertNull(resolveResult.getElement());
assertFalse(resolveResult.isValidResult());
final JavaResolveResult[] candidates = refExpr.multiResolve(false);
assertEquals(2, candidates.length);
}
//IDEADEV-3313
public void testCovariantReturnTypes() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final PsiElement parent = refExpr.getParent();
assertThat(parent, instanceOf(PsiMethodCallExpression.class));
final PsiMethod method = ((PsiCall)parent).resolveMethod();
assertNotNull(method);
assertEquals("E", method.getContainingClass().getName());
}
public void testGenericMethods1() throws Exception {
final PsiReference ref = configureByFile();
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final PsiElement parent = refExpr.getParent();
assertThat(parent, instanceOf(PsiMethodCallExpression.class));
final PsiMethodCallExpression expression = (PsiMethodCallExpression)parent;
assertNull(expression.resolveMethod());
final JavaResolveResult[] results = expression.getMethodExpression().multiResolve(false);
assertEquals(2, results.length);
}
public void testGenericMethods2() throws Exception {
final PsiReference ref = configureByFile();
final PsiMethod method = checkResolvesUnique(ref);
assertEquals(0, method.getTypeParameters().length);
}
public void testGenericMethods3() throws Exception {
final PsiReference ref = configureByFile();
final PsiMethod method = checkResolvesUnique(ref);
assertEquals(0, method.getTypeParameters().length);
}
public void testGenericMethods4() throws Exception {
final PsiReference ref = configureByFile();
final PsiMethod method = checkResolvesUnique(ref);
assertEquals(0, method.getTypeParameters().length);
}
public void testGenericMethods5() throws Exception {
final PsiReference ref = configureByFile();
final PsiMethod method = checkResolvesUnique(ref);
assertEquals(2, method.getTypeParameters().length);
}
public void testGenericMethods6() throws Exception {
final PsiReference ref = configureByFile();
checkResolvesUnique(ref);
}
public void testGenericClass1() throws Exception {
final PsiReference ref = configureByFile();
final PsiMethod method = checkResolvesUnique(ref);
assertEquals("Foo", method.getContainingClass().getName());
}
public void testGenericClass2() throws Exception {
final PsiReference ref = configureByFile();
final PsiMethod method = checkResolvesUnique(ref);
assertEquals(0, method.getTypeParameters().length);
}
public void testMoreSpecificSameErasure() throws Exception {
final PsiReference ref = configureByFile();
final PsiMethod method = checkResolvesUnique(ref);
assertEquals(0, method.getTypeParameters().length);
}
private PsiReference configureByFile() throws Exception {
return configureByFile("method/generics/" + getTestName(false) + ".java");
}
private static PsiMethod checkResolvesUnique(final PsiReference ref) {
assertThat(ref, instanceOf(PsiReferenceExpression.class));
final PsiReferenceExpression refExpr = (PsiReferenceExpression)ref;
final PsiElement parent = refExpr.getParent();
assertThat(parent, instanceOf(PsiMethodCallExpression.class));
final PsiMethodCallExpression expression = (PsiMethodCallExpression)parent;
final PsiMethod method = expression.resolveMethod();
assertNotNull(method);
return method;
}
public void testTestGenericMethodOverloading1() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
assertThat(target.getParent(), instanceOf(PsiClass.class));
assertEquals("Object", ((NavigationItem)target.getParent()).getName());
}
public void testPreferArrayTypeToObject() throws Exception {
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
assertThat(target.getParent(), instanceOf(PsiClass.class));
final PsiParameter[] parameters = ((PsiMethod)target).getParameterList().getParameters();
assertEquals(1, parameters.length);
assertTrue(parameters[0].getType() instanceof PsiArrayType);
}
public void testTestGenericMethodOverloading2() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
assertThat(target.getParent(), instanceOf(PsiClass.class));
assertEquals("A", ((NavigationItem)target.getParent()).getName());
}
public void testTestGenericMethodOverloading3() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
assertThat(target.getParent(), instanceOf(PsiClass.class));
assertEquals("Object", ((NavigationItem)target.getParent()).getName());
}
public void testTestGenericMethodOverloading4() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
assertThat(target.getParent(), instanceOf(PsiClass.class));
assertEquals("A", ((NavigationItem)target.getParent()).getName());
}
public void testTestReturnType1() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
}
public void testTestReturnType2() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertNull(target);
}
public void testMerge1() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertNull(target);
}
public void testExtends1() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
}
public void testInheritance1() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
}
public void testInheritance2() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
}
public void testInheritance3() throws Exception{
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
}
public void testInheritance4() throws Exception{
RecursionManager.assertOnRecursionPrevention(getTestRootDisposable());
PsiReference ref = configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
}
public void testExplicitParams1() throws Exception {
PsiReference ref = configureByFile();
assertGenericResolve(ref, "f", new String[] {"java.lang.String"}, "java.lang.String");
}
public void testExplicitParams2() throws Exception {
PsiReference ref = configureByFile();
assertGenericResolve(ref, "f", new String[] {"java.lang.Integer"}, "Foo");
}
public void testConstructorExplicitParams() throws Exception {
PsiReference ref = configureByFile();
assertThat(ref.getElement(), instanceOf(PsiJavaCodeReferenceElement.class));
assertThat(ref.getElement().getParent(), instanceOf(PsiNewExpression.class));
}
private static void assertGenericResolve(PsiReference ref, final String methodName, final String[] expectedTypeParameterValues, @NonNls final String expectedCallType) {
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
PsiMethod psiMethod = (PsiMethod)target;
assertEquals(methodName, psiMethod.getName());
assertThat(ref.getElement(), instanceOf(PsiJavaCodeReferenceElement.class));
PsiJavaCodeReferenceElement refElement = (PsiJavaCodeReferenceElement)ref.getElement();
JavaResolveResult resolveResult = refElement.advancedResolve(false);
PsiSubstitutor substitutor = resolveResult.getSubstitutor();
PsiTypeParameter[] typeParameters = psiMethod.getTypeParameters();
assertEquals(expectedTypeParameterValues.length, typeParameters.length);
for (int i = 0; i < expectedTypeParameterValues.length; i++) {
String expectedTypeParameterValue = expectedTypeParameterValues[i];
assertTrue(substitutor.substitute(typeParameters[i]).equalsToText(expectedTypeParameterValue));
}
PsiType type = ((PsiExpression)refElement.getParent()).getType();
assertTrue(type.equalsToText(expectedCallType));
}
public void testRawMethod1() throws Exception{
PsiJavaReference ref = (PsiJavaReference) configureByFile();
PsiElement target = ref.resolve();
assertThat(target, instanceOf(PsiMethod.class));
}
public void testDependingParams2() throws Exception{
PsiJavaReference ref = (PsiJavaReference) configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertTrue(result.isValidResult());
}
public void testTypeInference1() throws Exception{
PsiJavaReference ref = (PsiJavaReference) configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertNotNull(result.getElement());
}
public void testRawVsGenericConflict() throws Exception{
PsiJavaReference ref = (PsiJavaReference) configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertResolvesToMethodInClass(result, "A");
}
public void testRawInheritanceConflict() throws Exception {
PsiJavaReference ref = (PsiJavaReference)configureByFile();
final JavaResolveResult[] result = ref.multiResolve(false);
assertEquals("False ambiguity", 1, result.length);
}
public void testRawVsGenericConflictInCaseOfOverride() throws Exception{
PsiJavaReference ref = (PsiJavaReference) configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertResolvesToMethodInClass(result, "B");
}
public void testRawVsGenericConflictInCaseOfOverride2() throws Exception{
PsiJavaReference ref = (PsiJavaReference) configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertResolvesToMethodInClass(result, "TestProcessor");
}
public void testAutoboxingAndWidening() throws Exception{
PsiJavaReference ref = (PsiJavaReference) configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertNotNull(result.getElement());
assertTrue(result.isValidResult());
}
public void testSOE() throws Exception {
PsiReference ref = configureByFile();
ref.resolve();
}
public void testHidingSuperPrivate() throws Exception {
PsiJavaReference ref = (PsiJavaReference)configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertResolvesToMethodInClass(result, "S");
}
public void testNestedTypeParams() throws Exception {
PsiJavaReference ref = (PsiJavaReference)configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertResolvesToMethodInClass(result, "TestImpl");
}
public void testTypeParamBoundConflict() throws Exception {
PsiJavaReference ref = (PsiJavaReference)configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertResolvesToMethodInClass(result, "Testergen");
}
public void testAmbiguousBoxing() throws Exception {
PsiJavaReference ref = (PsiJavaReference)configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertFalse(result.isValidResult());
JavaResolveResult[] results = ref.multiResolve(false);
assertEquals(2, results.length);
assertEquals("f", ((PsiMethod)results[0].getElement()).getName());
assertEquals("f", ((PsiMethod)results[1].getElement()).getName());
}
public void testStaticMethodInSubclass() throws Exception {
PsiJavaReference ref = (PsiJavaReference)configureByFile();
final JavaResolveResult result = ref.advancedResolve(true);
assertTrue(result.isValidResult());
assertResolvesToMethodInClass(result, "SomeSubClass");
}
  // Shared assertion: the resolve result is a valid method that belongs to the
  // class with the given short name.
  private static void assertResolvesToMethodInClass(JavaResolveResult result, @NonNls String name) {
    PsiMethod method = (PsiMethod)result.getElement();
    assertNotNull(method);
    assertTrue(result.isValidResult());
    assertEquals(name, method.getContainingClass().getName());
  }
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.bookkeeper.util.collections;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.collect.Lists;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.locks.StampedLock;
import java.util.function.LongFunction;
/**
 * Map from long to an Object.
 *
 * <p>Provides similar methods as a {@code ConcurrentMap<long,Object>} with 2 differences:
 * <ol>
 * <li>No boxing/unboxing from long -> Long
 * <li>Open hash map with linear probing, no node allocations to store the values
 * </ol>
 *
 * @param <V>
 */
@SuppressWarnings("unchecked")
public class ConcurrentLongHashMap<V> {
    // Sentinels stored in the values array: EmptyValue (null) marks a never-used
    // bucket, DeletedValue marks a tombstone left behind by a removal so that
    // linear probing can continue past it.
    private static final Object EmptyValue = null;
    private static final Object DeletedValue = new Object();
    private static final int DefaultExpectedItems = 256;
    private static final int DefaultConcurrencyLevel = 16;
    private static final float DefaultMapFillFactor = 0.66f;
    private static final float DefaultMapIdleFactor = 0.15f;
    private static final float DefaultExpandFactor = 2;
    private static final float DefaultShrinkFactor = 2;
    private static final boolean DefaultAutoShrink = false;
    /** Creates a builder pre-populated with the default sizing parameters. */
    public static <V> Builder<V> newBuilder() {
        return new Builder<>();
    }
    /**
     * Builder of ConcurrentLongHashMap.
     */
    public static class Builder<T> {
        int expectedItems = DefaultExpectedItems;
        int concurrencyLevel = DefaultConcurrencyLevel;
        float mapFillFactor = DefaultMapFillFactor;
        float mapIdleFactor = DefaultMapIdleFactor;
        float expandFactor = DefaultExpandFactor;
        float shrinkFactor = DefaultShrinkFactor;
        boolean autoShrink = DefaultAutoShrink;
        public Builder<T> expectedItems(int expectedItems) {
            this.expectedItems = expectedItems;
            return this;
        }
        public Builder<T> concurrencyLevel(int concurrencyLevel) {
            this.concurrencyLevel = concurrencyLevel;
            return this;
        }
        public Builder<T> mapFillFactor(float mapFillFactor) {
            this.mapFillFactor = mapFillFactor;
            return this;
        }
        public Builder<T> mapIdleFactor(float mapIdleFactor) {
            this.mapIdleFactor = mapIdleFactor;
            return this;
        }
        public Builder<T> expandFactor(float expandFactor) {
            this.expandFactor = expandFactor;
            return this;
        }
        public Builder<T> shrinkFactor(float shrinkFactor) {
            this.shrinkFactor = shrinkFactor;
            return this;
        }
        public Builder<T> autoShrink(boolean autoShrink) {
            this.autoShrink = autoShrink;
            return this;
        }
        public ConcurrentLongHashMap<T> build() {
            return new ConcurrentLongHashMap<>(expectedItems, concurrencyLevel,
                    mapFillFactor, mapIdleFactor, autoShrink, expandFactor, shrinkFactor);
        }
    }
    /**
     * Predicate specialization for (long, V) types.
     *
     * @param <V>
     */
    public interface LongObjectPredicate<V> {
        boolean test(long key, V value);
    }
    // The map is striped into independently-locked sections to reduce contention.
    private final Section<V>[] sections;
    @Deprecated
    public ConcurrentLongHashMap() {
        this(DefaultExpectedItems);
    }
    @Deprecated
    public ConcurrentLongHashMap(int expectedItems) {
        this(expectedItems, DefaultConcurrencyLevel);
    }
    @Deprecated
    public ConcurrentLongHashMap(int expectedItems, int concurrencyLevel) {
        this(expectedItems, concurrencyLevel, DefaultMapFillFactor, DefaultMapIdleFactor,
                DefaultAutoShrink, DefaultExpandFactor, DefaultShrinkFactor);
    }
    /**
     * Full constructor; prefer {@link #newBuilder()}.
     *
     * @throws IllegalArgumentException if any sizing parameter is out of range
     */
    public ConcurrentLongHashMap(int expectedItems, int concurrencyLevel,
                                 float mapFillFactor, float mapIdleFactor,
                                 boolean autoShrink, float expandFactor, float shrinkFactor) {
        checkArgument(expectedItems > 0);
        checkArgument(concurrencyLevel > 0);
        checkArgument(expectedItems >= concurrencyLevel);
        checkArgument(mapFillFactor > 0 && mapFillFactor < 1);
        checkArgument(mapIdleFactor > 0 && mapIdleFactor < 1);
        checkArgument(mapFillFactor > mapIdleFactor);
        checkArgument(expandFactor > 1);
        checkArgument(shrinkFactor > 1);
        int numSections = concurrencyLevel;
        int perSectionExpectedItems = expectedItems / numSections;
        int perSectionCapacity = (int) (perSectionExpectedItems / mapFillFactor);
        this.sections = (Section<V>[]) new Section[numSections];
        for (int i = 0; i < numSections; i++) {
            sections[i] = new Section<>(perSectionCapacity, mapFillFactor, mapIdleFactor,
                    autoShrink, expandFactor, shrinkFactor);
        }
    }
    // Sums per-section sizes; the total is weakly consistent under concurrent updates.
    public long size() {
        long size = 0;
        for (Section<V> s : sections) {
            size += s.size;
        }
        return size;
    }
    long getUsedBucketCount() {
        long usedBucketCount = 0;
        for (Section<V> s : sections) {
            usedBucketCount += s.usedBuckets;
        }
        return usedBucketCount;
    }
    public long capacity() {
        long capacity = 0;
        for (Section<V> s : sections) {
            capacity += s.capacity;
        }
        return capacity;
    }
    public boolean isEmpty() {
        for (Section<V> s : sections) {
            if (s.size != 0) {
                return false;
            }
        }
        return true;
    }
    /** Returns the value mapped to {@code key}, or {@code null} if absent. */
    public V get(long key) {
        long h = hash(key);
        return getSection(h).get(key, (int) h);
    }
    public boolean containsKey(long key) {
        return get(key) != null;
    }
    /** Associates {@code value} with {@code key}; returns the previous value or null. */
    public V put(long key, V value) {
        checkNotNull(value);
        long h = hash(key);
        return getSection(h).put(key, value, (int) h, false, null);
    }
    /** Inserts only if absent; returns the existing value, or null if the insert happened. */
    public V putIfAbsent(long key, V value) {
        checkNotNull(value);
        long h = hash(key);
        return getSection(h).put(key, value, (int) h, true, null);
    }
    /** Atomically computes and inserts a value for {@code key} if absent. */
    public V computeIfAbsent(long key, LongFunction<V> provider) {
        checkNotNull(provider);
        long h = hash(key);
        return getSection(h).put(key, null, (int) h, true, provider);
    }
    /** Removes the mapping for {@code key}; returns the removed value or null. */
    public V remove(long key) {
        long h = hash(key);
        return getSection(h).remove(key, null, (int) h);
    }
    /** Removes the mapping only if it is currently mapped to {@code value}. */
    public boolean remove(long key, Object value) {
        checkNotNull(value);
        long h = hash(key);
        return getSection(h).remove(key, value, (int) h) != null;
    }
    /** Removes all entries matching the predicate; returns the number removed. */
    public int removeIf(LongObjectPredicate<V> predicate) {
        checkNotNull(predicate);
        int removedCount = 0;
        for (Section<V> s : sections) {
            removedCount += s.removeIf(predicate);
        }
        return removedCount;
    }
    private Section<V> getSection(long hash) {
        // Use 32 msb out of long to get the section
        final int sectionIdx = (int) (hash >>> 32) & (sections.length - 1);
        return sections[sectionIdx];
    }
    public void clear() {
        for (Section<V> s : sections) {
            s.clear();
        }
    }
    // Iterates every live entry; iteration over each section is weakly consistent.
    public void forEach(EntryProcessor<V> processor) {
        for (Section<V> s : sections) {
            s.forEach(processor);
        }
    }
    /**
     * @return a new list of all keys (makes a copy)
     */
    public List<Long> keys() {
        List<Long> keys = Lists.newArrayListWithExpectedSize((int) size());
        forEach((key, value) -> keys.add(key));
        return keys;
    }
    /**
     * @return a new list of all values (makes a copy)
     */
    public List<V> values() {
        List<V> values = Lists.newArrayListWithExpectedSize((int) size());
        forEach((key, value) -> values.add(value));
        return values;
    }
    /**
     * An entry processor.
     */
    public interface EntryProcessor<V> {
        void accept(long key, V value);
    }
    // A section is a portion of the hash map that is covered by a single
    // StampedLock (the section itself extends StampedLock to avoid an extra object).
    @SuppressWarnings("serial")
    private static final class Section<V> extends StampedLock {
        // Parallel open-addressed arrays: keys[i] holds the key for values[i].
        // Declared volatile so optimistic readers see the arrays swapped by rehash().
        private volatile long[] keys;
        private volatile V[] values;
        private volatile int capacity;
        private final int initCapacity;
        private volatile int size;
        // Buckets that are non-empty, including tombstones. Guarded by the write lock.
        private int usedBuckets;
        private int resizeThresholdUp;
        private int resizeThresholdBelow;
        private final float mapFillFactor;
        private final float mapIdleFactor;
        private final float expandFactor;
        private final float shrinkFactor;
        private final boolean autoShrink;
        Section(int capacity, float mapFillFactor, float mapIdleFactor, boolean autoShrink,
                float expandFactor, float shrinkFactor) {
            this.capacity = alignToPowerOfTwo(capacity);
            this.initCapacity = this.capacity;
            this.keys = new long[this.capacity];
            this.values = (V[]) new Object[this.capacity];
            this.size = 0;
            this.usedBuckets = 0;
            this.autoShrink = autoShrink;
            this.mapFillFactor = mapFillFactor;
            this.mapIdleFactor = mapIdleFactor;
            this.expandFactor = expandFactor;
            this.shrinkFactor = shrinkFactor;
            this.resizeThresholdUp = (int) (this.capacity * mapFillFactor);
            this.resizeThresholdBelow = (int) (this.capacity * mapIdleFactor);
        }
        // Lock-free fast path via tryOptimisticRead(); falls back to the read lock
        // when validation fails (a concurrent write happened).
        V get(long key, int keyHash) {
            int bucket = keyHash;
            long stamp = tryOptimisticRead();
            boolean acquiredLock = false;
            try {
                while (true) {
                    int capacity = this.capacity;
                    bucket = signSafeMod(bucket, capacity);
                    // First try optimistic locking
                    long storedKey = keys[bucket];
                    V storedValue = values[bucket];
                    if (!acquiredLock && validate(stamp)) {
                        // The values we have read are consistent
                        if (storedKey == key) {
                            return storedValue != DeletedValue ? storedValue : null;
                        } else if (storedValue == EmptyValue) {
                            // Not found
                            return null;
                        }
                    } else {
                        // Fallback to acquiring read lock
                        if (!acquiredLock) {
                            stamp = readLock();
                            acquiredLock = true;
                            storedKey = keys[bucket];
                            storedValue = values[bucket];
                        }
                        if (capacity != this.capacity) {
                            // There has been a rehashing. We need to restart the search
                            bucket = keyHash;
                            continue;
                        }
                        if (storedKey == key) {
                            return storedValue != DeletedValue ? storedValue : null;
                        } else if (storedValue == EmptyValue) {
                            // Not found
                            return null;
                        }
                    }
                    ++bucket;
                }
            } finally {
                if (acquiredLock) {
                    unlockRead(stamp);
                }
            }
        }
        // Insert/update under the write lock. Exactly one of value / valueProvider is
        // non-null (enforced by the public callers). When inserting through a
        // valueProvider (computeIfAbsent), the newly created value is returned;
        // a plain insert of a new key returns null.
        V put(long key, V value, int keyHash, boolean onlyIfAbsent, LongFunction<V> valueProvider) {
            int bucket = keyHash;
            long stamp = writeLock();
            int capacity = this.capacity;
            // Remember where we find the first available spot
            int firstDeletedKey = -1;
            try {
                while (true) {
                    bucket = signSafeMod(bucket, capacity);
                    long storedKey = keys[bucket];
                    V storedValue = values[bucket];
                    if (storedKey == key) {
                        if (storedValue == EmptyValue) {
                            values[bucket] = value != null ? value : valueProvider.apply(key);
                            ++size;
                            ++usedBuckets;
                            return valueProvider != null ? values[bucket] : null;
                        } else if (storedValue == DeletedValue) {
                            // Re-using a tombstone of the same key: bucket already counted in usedBuckets.
                            values[bucket] = value != null ? value : valueProvider.apply(key);
                            ++size;
                            return valueProvider != null ? values[bucket] : null;
                        } else if (!onlyIfAbsent) {
                            // Over written an old value for same key
                            values[bucket] = value;
                            return storedValue;
                        } else {
                            return storedValue;
                        }
                    } else if (storedValue == EmptyValue) {
                        // Found an empty bucket. This means the key is not in the map. If we've already seen a deleted
                        // key, we should write at that position
                        if (firstDeletedKey != -1) {
                            bucket = firstDeletedKey;
                        } else {
                            ++usedBuckets;
                        }
                        keys[bucket] = key;
                        values[bucket] = value != null ? value : valueProvider.apply(key);
                        ++size;
                        return valueProvider != null ? values[bucket] : null;
                    } else if (storedValue == DeletedValue) {
                        // The bucket contained a different deleted key
                        if (firstDeletedKey == -1) {
                            firstDeletedKey = bucket;
                        }
                    }
                    ++bucket;
                }
            } finally {
                // Expand (still under the write lock) once too many buckets are occupied.
                if (usedBuckets > resizeThresholdUp) {
                    try {
                        int newCapacity = alignToPowerOfTwo((int) (capacity * expandFactor));
                        rehash(newCapacity);
                    } finally {
                        unlockWrite(stamp);
                    }
                } else {
                    unlockWrite(stamp);
                }
            }
        }
        private void cleanDeletedStatus(int startBucket) {
            // Cleanup all the buckets that were in `DeletedValue` state,
            // so that we can reduce unnecessary expansions
            int lastBucket = signSafeMod(startBucket - 1, capacity);
            while (values[lastBucket] == DeletedValue) {
                values[lastBucket] = (V) EmptyValue;
                --usedBuckets;
                lastBucket = signSafeMod(--lastBucket, capacity);
            }
        }
        // Removes key (optionally only when mapped to `value`); returns the removed
        // value or null. May leave a tombstone so that probe chains stay intact.
        private V remove(long key, Object value, int keyHash) {
            int bucket = keyHash;
            long stamp = writeLock();
            try {
                while (true) {
                    int capacity = this.capacity;
                    bucket = signSafeMod(bucket, capacity);
                    long storedKey = keys[bucket];
                    V storedValue = values[bucket];
                    if (storedKey == key) {
                        if (value == null || value.equals(storedValue)) {
                            if (storedValue == EmptyValue || storedValue == DeletedValue) {
                                return null;
                            }
                            --size;
                            // If the next bucket is empty the probe chain ends here, so this
                            // bucket can be emptied (no tombstone needed).
                            V nextValueInArray = values[signSafeMod(bucket + 1, capacity)];
                            if (nextValueInArray == EmptyValue) {
                                values[bucket] = (V) EmptyValue;
                                --usedBuckets;
                                cleanDeletedStatus(bucket);
                            } else {
                                values[bucket] = (V) DeletedValue;
                            }
                            return storedValue;
                        } else {
                            return null;
                        }
                    } else if (storedValue == EmptyValue) {
                        // Key wasn't found
                        return null;
                    }
                    ++bucket;
                }
            } finally {
                // Optionally shrink (still under the write lock) once the section is mostly idle.
                if (autoShrink && size < resizeThresholdBelow) {
                    try {
                        int newCapacity = alignToPowerOfTwo((int) (capacity / shrinkFactor));
                        int newResizeThresholdUp = (int) (newCapacity * mapFillFactor);
                        if (newCapacity < capacity && newResizeThresholdUp > size) {
                            // shrink the hashmap
                            rehash(newCapacity);
                        }
                    } finally {
                        unlockWrite(stamp);
                    }
                } else {
                    unlockWrite(stamp);
                }
            }
        }
        int removeIf(LongObjectPredicate<V> filter) {
            long stamp = writeLock();
            int removedCount = 0;
            try {
                // Go through all the buckets for this section
                int capacity = this.capacity;
                for (int bucket = 0; bucket < capacity; bucket++) {
                    long storedKey = keys[bucket];
                    V storedValue = values[bucket];
                    if (storedValue != EmptyValue && storedValue != DeletedValue) {
                        if (filter.test(storedKey, storedValue)) {
                            // Removing item
                            --size;
                            ++removedCount;
                            V nextValueInArray = values[signSafeMod(bucket + 1, capacity)];
                            if (nextValueInArray == EmptyValue) {
                                values[bucket] = (V) EmptyValue;
                                --usedBuckets;
                                cleanDeletedStatus(bucket);
                            } else {
                                values[bucket] = (V) DeletedValue;
                            }
                        }
                    }
                }
                return removedCount;
            } finally {
                // Same shrink policy as remove().
                if (autoShrink && size < resizeThresholdBelow) {
                    try {
                        int newCapacity = alignToPowerOfTwo((int) (capacity / shrinkFactor));
                        int newResizeThresholdUp = (int) (newCapacity * mapFillFactor);
                        if (newCapacity < capacity && newResizeThresholdUp > size) {
                            // shrink the hashmap
                            rehash(newCapacity);
                        }
                    } finally {
                        unlockWrite(stamp);
                    }
                } else {
                    unlockWrite(stamp);
                }
            }
        }
        void clear() {
            long stamp = writeLock();
            try {
                Arrays.fill(keys, 0);
                Arrays.fill(values, EmptyValue);
                this.size = 0;
                this.usedBuckets = 0;
                if (autoShrink) {
                    rehash(initCapacity);
                }
            } finally {
                unlockWrite(stamp);
            }
        }
        // Optimistic iteration over all live entries; falls back to the read lock
        // on the first failed validation.
        public void forEach(EntryProcessor<V> processor) {
            long stamp = tryOptimisticRead();
            int capacity = this.capacity;
            long[] keys = this.keys;
            V[] values = this.values;
            boolean acquiredReadLock = false;
            try {
                // Validate no rehashing
                if (!validate(stamp)) {
                    // Fallback to read lock
                    stamp = readLock();
                    acquiredReadLock = true;
                    capacity = this.capacity;
                    keys = this.keys;
                    values = this.values;
                }
                // Go through all the buckets for this section
                for (int bucket = 0; bucket < capacity; bucket++) {
                    long storedKey = keys[bucket];
                    V storedValue = values[bucket];
                    if (!acquiredReadLock && !validate(stamp)) {
                        // Fallback to acquiring read lock
                        stamp = readLock();
                        acquiredReadLock = true;
                        storedKey = keys[bucket];
                        storedValue = values[bucket];
                    }
                    if (storedValue != DeletedValue && storedValue != EmptyValue) {
                        processor.accept(storedKey, storedValue);
                    }
                }
            } finally {
                if (acquiredReadLock) {
                    unlockRead(stamp);
                }
            }
        }
        // Re-inserts all live entries into freshly-sized arrays. Must be called
        // while holding the write lock. Tombstones are dropped, so usedBuckets == size afterwards.
        private void rehash(int newCapacity) {
            // Expand the hashmap
            long[] newKeys = new long[newCapacity];
            V[] newValues = (V[]) new Object[newCapacity];
            // Re-hash table
            for (int i = 0; i < keys.length; i++) {
                long storedKey = keys[i];
                V storedValue = values[i];
                if (storedValue != EmptyValue && storedValue != DeletedValue) {
                    insertKeyValueNoLock(newKeys, newValues, storedKey, storedValue);
                }
            }
            keys = newKeys;
            values = newValues;
            usedBuckets = size;
            // Capacity needs to be updated after the values, so that we won't see
            // a capacity value bigger than the actual array size
            capacity = newCapacity;
            resizeThresholdUp = (int) (capacity * mapFillFactor);
            resizeThresholdBelow = (int) (capacity * mapIdleFactor);
        }
        private static <V> void insertKeyValueNoLock(long[] keys, V[] values, long key, V value) {
            int bucket = (int) hash(key);
            while (true) {
                bucket = signSafeMod(bucket, keys.length);
                V storedValue = values[bucket];
                if (storedValue == EmptyValue) {
                    // The bucket is empty, so we can use it
                    keys[bucket] = key;
                    values[bucket] = value;
                    return;
                }
                ++bucket;
            }
        }
    }
    // 64-bit mix constants (MurmurHash-style finalizer) used to scatter keys.
    private static final long HashMixer = 0xc6a4a7935bd1e995L;
    private static final int R = 47;
    static final long hash(long key) {
        long hash = key * HashMixer;
        hash ^= hash >>> R;
        hash *= HashMixer;
        return hash;
    }
    // Bit-mask modulo; `max` must be a power of two, result is always non-negative.
    static final int signSafeMod(long n, int max) {
        return (int) n & (max - 1);
    }
    // Rounds n up to the next power of two (n >= 1).
    private static int alignToPowerOfTwo(int n) {
        return (int) Math.pow(2, 32 - Integer.numberOfLeadingZeros(n - 1));
    }
}
| |
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.buildtool;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.flogger.GoogleLogger;
import com.google.devtools.build.lib.actions.BuildFailedException;
import com.google.devtools.build.lib.analysis.AnalysisPhaseCompleteEvent;
import com.google.devtools.build.lib.analysis.AnalysisResult;
import com.google.devtools.build.lib.analysis.BuildView;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.ViewCreationFailedException;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.config.BuildOptions;
import com.google.devtools.build.lib.analysis.config.CoreOptions;
import com.google.devtools.build.lib.analysis.config.InvalidConfigurationException;
import com.google.devtools.build.lib.buildeventstream.AbortedEvent;
import com.google.devtools.build.lib.buildeventstream.BuildEventIdUtil;
import com.google.devtools.build.lib.buildeventstream.BuildEventStreamProtos.Aborted.AbortReason;
import com.google.devtools.build.lib.buildtool.buildevent.NoAnalyzeEvent;
import com.google.devtools.build.lib.buildtool.buildevent.TestFilteringCompleteEvent;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.TargetParsingException;
import com.google.devtools.build.lib.cmdline.TargetPattern;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.pkgcache.LoadingFailedException;
import com.google.devtools.build.lib.profiler.ProfilePhase;
import com.google.devtools.build.lib.profiler.Profiler;
import com.google.devtools.build.lib.profiler.SilentCloseable;
import com.google.devtools.build.lib.runtime.BlazeModule;
import com.google.devtools.build.lib.runtime.CommandEnvironment;
import com.google.devtools.build.lib.server.FailureDetails.BuildConfiguration.Code;
import com.google.devtools.build.lib.server.FailureDetails.FailureDetail;
import com.google.devtools.build.lib.skyframe.BuildConfigurationValue;
import com.google.devtools.build.lib.skyframe.BuildInfoCollectionFunction;
import com.google.devtools.build.lib.skyframe.PrecomputedValue;
import com.google.devtools.build.lib.skyframe.TargetPatternPhaseValue;
import com.google.devtools.build.lib.util.AbruptExitException;
import com.google.devtools.build.lib.util.DetailedExitCode;
import com.google.devtools.build.lib.util.RegexFilter;
import com.google.devtools.common.options.OptionsParsingException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;
/** Performs target pattern eval, configuration creation, loading and analysis. */
public final class AnalysisPhaseRunner {
private static final GoogleLogger logger = GoogleLogger.forEnclosingClass();
private AnalysisPhaseRunner() {}
  /**
   * Runs target pattern evaluation, optional instrumentation-filter computation, and
   * (when requested) the loading/analysis phase.
   *
   * <p>Returns the analysis result, or {@code null} when the request disabled the analysis
   * phase ({@code performAnalysisPhase} unset) and loading finished without a failure.
   *
   * <p>NOTE(review): mutates {@code buildOptions} in place when computing the heuristic
   * instrumentation filter (see comment below).
   */
  public static AnalysisResult execute(
      CommandEnvironment env,
      BuildRequest request,
      BuildOptions buildOptions,
      TargetValidator validator)
      throws BuildFailedException, InterruptedException, ViewCreationFailedException,
          TargetParsingException, LoadingFailedException, AbruptExitException,
          InvalidConfigurationException {

    // Target pattern evaluation.
    TargetPatternPhaseValue loadingResult;
    Profiler.instance().markPhase(ProfilePhase.TARGET_PATTERN_EVAL);
    try (SilentCloseable c = Profiler.instance().profile("evaluateTargetPatterns")) {
      loadingResult = evaluateTargetPatterns(env, request, validator);
    }
    env.setWorkspaceName(loadingResult.getWorkspaceName());

    // Compute the heuristic instrumentation filter if needed.
    if (request.needsInstrumentationFilter()) {
      try (SilentCloseable c = Profiler.instance().profile("Compute instrumentation filter")) {
        String instrumentationFilter =
            InstrumentationFilterSupport.computeInstrumentationFilter(
                env.getReporter(),
                // TODO(ulfjack): Expensive. Make this part of the TargetPatternPhaseValue or write
                // a new SkyFunction to compute it?
                loadingResult.getTestsToRun(env.getReporter(), env.getPackageManager()));
        try {
          // We're modifying the buildOptions in place, which is not ideal, but we also don't want
          // to pay the price for making a copy. Maybe reconsider later if this turns out to be a
          // problem (and the performance loss may not be a big deal).
          buildOptions.get(CoreOptions.class).instrumentationFilter =
              new RegexFilter.RegexFilterConverter().convert(instrumentationFilter);
        } catch (OptionsParsingException e) {
          throw new InvalidConfigurationException(Code.HEURISTIC_INSTRUMENTATION_FILTER_INVALID, e);
        }
      }
    }

    // Exit if there are any pending exceptions from modules.
    env.throwPendingException();

    AnalysisResult analysisResult = null;
    if (request.getBuildOptions().performAnalysisPhase) {
      Profiler.instance().markPhase(ProfilePhase.ANALYZE);
      // The build info factories are immutable during the life time of this server. However, we
      // sometimes clean the graph, which requires re-injecting the value, which requires a hook to
      // do so afterwards, and there is no such hook at the server / workspace level right now. For
      // simplicity, we keep the code here for now.
      env.getSkyframeExecutor()
          .injectExtraPrecomputedValues(
              ImmutableList.of(
                  PrecomputedValue.injected(
                      BuildInfoCollectionFunction.BUILD_INFO_FACTORIES,
                      env.getRuntime().getRuleClassProvider().getBuildInfoFactoriesAsMap())));
      try (SilentCloseable c = Profiler.instance().profile("runAnalysisPhase")) {
        analysisResult =
            runAnalysisPhase(env, request, loadingResult, buildOptions, request.getMultiCpus());
      }
      // Give each module a chance to inspect the completed analysis before reporting.
      for (BlazeModule module : env.getRuntime().getBlazeModules()) {
        module.afterAnalysis(env, request, buildOptions, analysisResult);
      }
      reportTargets(env, analysisResult);
      // Announce every skipped target on the event bus so downstream consumers
      // (e.g. BEP) see an explicit SKIPPED abort rather than silence.
      for (ConfiguredTarget target : analysisResult.getTargetsToSkip()) {
        BuildConfiguration config =
            env.getSkyframeExecutor()
                .getConfiguration(env.getReporter(), target.getConfigurationKey());
        Label label = target.getLabel();
        env.getEventBus()
            .post(
                new AbortedEvent(
                    BuildEventIdUtil.targetCompleted(label, config.getEventId()),
                    AbortReason.SKIPPED,
                    String.format("Target %s build was skipped.", label),
                    label));
      }
    } else {
      // Analysis phase skipped: report loading completion, and surface any loading
      // failure as a BuildFailedException.
      env.getReporter().handle(Event.progress("Loading complete."));
      env.getReporter().post(new NoAnalyzeEvent());
      logger.atInfo().log("No analysis requested, so finished");
      FailureDetail failureDetail = BuildView.createFailureDetail(loadingResult, null, null);
      if (failureDetail != null) {
        throw new BuildFailedException(
            failureDetail.getMessage(), DetailedExitCode.of(failureDetail));
      }
    }
    return analysisResult;
  }
/**
* Runs the target pattern phase: resolves the request's target patterns through the Skyframe
* executor, applying the loading-phase filters from the request.
*
* @param env command environment providing the reporter and the Skyframe executor
* @param request build request whose targets, loading options and thread count are used
* @param validator optional extra validation of the resolved targets; skipped when null
* @return the {@code TargetPatternPhaseValue} produced by {@code loadTargetPatternsWithFilters}
* @throws TargetParsingException if a target pattern cannot be parsed
* @throws LoadingFailedException if loading fails (e.g. the validator rejects the targets)
* @throws InterruptedException if the current thread was interrupted
*/
private static TargetPatternPhaseValue evaluateTargetPatterns(
CommandEnvironment env, final BuildRequest request, final TargetValidator validator)
throws LoadingFailedException, TargetParsingException, InterruptedException {
boolean keepGoing = request.getKeepGoing();
TargetPatternPhaseValue result =
env.getSkyframeExecutor()
.loadTargetPatternsWithFilters(
env.getReporter(),
request.getTargets(),
env.getRelativeWorkingDirectory(),
request.getLoadingOptions(),
request.getLoadingPhaseThreadCount(),
keepGoing,
request.shouldRunTests());
if (validator != null) {
// keepGoing is forwarded to the validator — presumably so failures are softened under
// --keep_going; confirm against TargetValidator implementations.
Collection<Target> targets =
result.getTargets(env.getReporter(), env.getSkyframeExecutor().getPackageManager());
validator.validateTargets(targets, keepGoing);
}
return result;
}
/**
* Runs the analysis phase for the already-loaded targets via {@link BuildView#update} and posts
* the corresponding completion events ({@code AnalysisPhaseCompleteEvent},
* {@code TestFilteringCompleteEvent}).
*
* <p>NOTE(review): the previous javadoc here claimed this method "performs phases 0-2" and
* "returns null if loading / analysis phases were successful" — neither matches the code, which
* always returns the {@code AnalysisResult} produced by {@code BuildView#update}.
*
* @param env the command environment of the current invocation
* @param request the build request being processed
* @param loadingResult the result of the target pattern evaluation
* @param targetOptions the top-level build options used to configure the targets
* @param multiCpu set of additional cpus to configure for — presumably from
*     {@code --experimental_multi_cpu}; confirm with the caller
* @return the analysis result; never null
* @throws InterruptedException if the current thread was interrupted
* @throws InvalidConfigurationException if the build configuration could not be created
* @throws ViewCreationFailedException if analysis failed for any reason
*/
private static AnalysisResult runAnalysisPhase(
CommandEnvironment env,
BuildRequest request,
TargetPatternPhaseValue loadingResult,
BuildOptions targetOptions,
Set<String> multiCpu)
throws InterruptedException, InvalidConfigurationException, ViewCreationFailedException {
Stopwatch timer = Stopwatch.createStarted();
env.getReporter().handle(Event.progress("Loading complete. Analyzing..."));
ImmutableSet<String> explicitTargetPatterns =
getExplicitTargetPatterns(env, request.getTargets());
BuildView view =
new BuildView(
env.getDirectories(),
env.getRuntime().getRuleClassProvider(),
env.getSkyframeExecutor(),
env.getRuntime().getCoverageReportActionFactory(request));
AnalysisResult analysisResult =
view.update(
loadingResult,
targetOptions,
multiCpu,
explicitTargetPatterns,
request.getAspects(),
request.getViewOptions(),
request.getKeepGoing(),
request.getCheckForActionConflicts(),
request.getLoadingPhaseThreadCount(),
request.getTopLevelArtifactContext(),
env.getReporter(),
env.getEventBus());
// TODO(bazel-team): Merge these into one event.
env.getEventBus()
.post(
new AnalysisPhaseCompleteEvent(
analysisResult.getTargetsToBuild(),
view.getEvaluatedCounts(),
view.getEvaluatedActionsCounts(),
timer.stop().elapsed(TimeUnit.MILLISECONDS),
view.getAndClearPkgManagerStatistics(),
env.getSkyframeExecutor().wasAnalysisCacheDiscardedAndResetBit()));
// Collect the distinct, non-null configuration keys of every target to build and (when test
// filtering ran) every target to test.
ImmutableSet<BuildConfigurationValue.Key> configurationKeys =
Stream.concat(
analysisResult
.getTargetsToBuild()
.stream()
.map(ConfiguredTarget::getConfigurationKey)
.distinct(),
analysisResult.getTargetsToTest() == null
? Stream.empty()
: analysisResult
.getTargetsToTest()
.stream()
.map(ConfiguredTarget::getConfigurationKey)
.distinct())
.filter(Objects::nonNull)
.distinct()
.collect(ImmutableSet.toImmutableSet());
Map<BuildConfigurationValue.Key, BuildConfiguration> configurationMap =
env.getSkyframeExecutor().getConfigurations(env.getReporter(), configurationKeys);
env.getEventBus()
.post(
new TestFilteringCompleteEvent(
analysisResult.getTargetsToBuild(),
analysisResult.getTargetsToTest(),
analysisResult.getTargetsToSkip(),
configurationMap));
return analysisResult;
}
/** Emits a single progress line summarising how many targets (and test targets) were found. */
private static void reportTargets(CommandEnvironment env, AnalysisResult analysisResult) {
  Collection<ConfiguredTarget> targetsToBuild = analysisResult.getTargetsToBuild();
  Collection<ConfiguredTarget> targetsToTest = analysisResult.getTargetsToTest();
  String message;
  if (targetsToTest == null) {
    // No test filtering happened: just count the targets to build.
    int targetCount = targetsToBuild.size();
    message = "Found " + targetCount + (targetCount == 1 ? " target..." : " targets...");
  } else {
    int testCount = targetsToTest.size();
    int targetCount = targetsToBuild.size() - testCount;
    String testPart =
        testCount + (testCount == 1 ? " test target..." : " test targets...");
    if (targetCount == 0) {
      // Everything found is a test target.
      message = "Found " + testPart;
    } else {
      message =
          "Found "
              + targetCount
              + (targetCount == 1 ? " target and " : " targets and ")
              + testPart;
    }
  }
  env.getReporter().handle(Event.info(message));
}
/**
* Turns target patterns from the command line into parsed equivalents for single targets.
*
* <p>Globbing targets like ":all" and "..." are ignored here and will not be in the returned set.
*
* @param env the action's environment.
* @param requestedTargetPatterns the list of target patterns specified on the command line.
* @return the set of stringified labels of target patterns that represent single targets. The
*     stringified labels are in the "unambiguous canonical form".
* @throws ViewCreationFailedException if a pattern fails to parse for some reason.
*/
private static ImmutableSet<String> getExplicitTargetPatterns(
    CommandEnvironment env, List<String> requestedTargetPatterns)
    throws ViewCreationFailedException {
  ImmutableSet.Builder<String> explicitTargetPatterns = ImmutableSet.builder();
  TargetPattern.Parser parser = new TargetPattern.Parser(env.getRelativeWorkingDirectory());
  for (String requestedTargetPattern : requestedTargetPatterns) {
    if (requestedTargetPattern.startsWith("-")) {
      // Excluded patterns are by definition not explicitly requested so we can move on to the
      // next target pattern.
      continue;
    }
    // Parse the pattern. This should always work because this is at least the second time we're
    // doing it. The previous time is in runAnalysisPhase(). Still, if parsing does fail we
    // propagate the exception up.
    TargetPattern parsedPattern;
    try {
      parsedPattern = parser.parse(requestedTargetPattern);
    } catch (TargetParsingException e) {
      throw new ViewCreationFailedException(
          "Failed to parse target pattern even though it was previously parsed successfully",
          e.getDetailedExitCode().getFailureDetail(),
          e);
    }
    if (parsedPattern.getType() == TargetPattern.Type.SINGLE_TARGET) {
      explicitTargetPatterns.add(parsedPattern.getSingleTargetPath());
    }
  }
  // Builder#build() already yields an ImmutableSet; the previous ImmutableSet.copyOf(...)
  // wrapper was a redundant no-op (copyOf returns its argument when it is already immutable).
  return explicitTargetPatterns.build();
}
}
| |
package InternalUnitParser.Operations;
import InternalUnitParser.CSharpAdaptation.*;
import InternalUnitParser.Classes.*;
import InternalUnitParser.Methods.*;
import UnitParser.*;
import UnitParser.UnitP.*;
import java.util.ArrayList;
//Centralises the "managed" arithmetic between UnitInfo variables: every calculation is
//overflow-aware and any problem is reported through the output's Error field (an ErrorTypes
//value) instead of letting an exception escape to the caller.
//Note: the former @SuppressWarnings("serial") is no longer needed because the double-brace
//(anonymous serialisable ArrayList subclass) initialisations have been replaced with plain adds.
public class Managed
{
    //Convenience overload: second operand supplied as a plain double.
    public static UnitInfo PerformManagedOperationUnits(UnitInfo firstInfo, double second, Operations operation)
    {
        return PerformManagedOperationUnits
        (
            firstInfo, ExceptionInstantiation.NewUnitInfo(second), operation
        );
    }

    //Convenience overload: first operand supplied as a plain double.
    public static UnitInfo PerformManagedOperationUnits(double first, UnitInfo secondInfo, Operations operation)
    {
        return PerformManagedOperationUnits
        (
            ExceptionInstantiation.NewUnitInfo(first), secondInfo, operation
        );
    }

    //This method should always be used when dealing with random UnitInfo variables because it accounts for all the
    //possible scenarios. On the other hand, with simple operations (e.g., random UnitInfo & numeric type) it might
    //be better to use PerformManagedOperationValues.
    public static UnitInfo PerformManagedOperationUnits(UnitInfo firstInfo, UnitInfo secondInfo, Operations operation)
    {
        //Pre-analysis: operand pairs which are incompatible with the given operation are
        //rejected before attempting any calculation.
        ErrorTypes errorType = MethodsCommon.GetOperationError
        (
            firstInfo, secondInfo, operation
        );
        if (errorType != ErrorTypes.None)
        {
            return ExceptionInstantiation.NewUnitInfo(firstInfo, errorType);
        }

        return
        (
            operation == Operations.Addition || operation == Operations.Subtraction ?
            PerformManagedOperationAddition(firstInfo, secondInfo, operation) :
            PerformManagedOperationMultiplication(firstInfo, secondInfo, operation)
        );
    }

    static UnitInfo PerformManagedOperationAddition(UnitInfo firstInfo, UnitInfo secondInfo, Operations operation)
    {
        return PerformManagedOperationNormalisedValues
        (
            //After being normalised, the operands might require further modifications.
            firstInfo, GetOperandsAddition(firstInfo, secondInfo, operation), operation
        );
    }

    //Returns copies of both operands adapted in such a way that the addition/subtraction can be
    //performed directly on their Value fields.
    static ArrayList<UnitInfo> GetOperandsAddition(UnitInfo firstInfo, UnitInfo secondInfo, Operations operation)
    {
        ArrayList<UnitInfo> operands2 = new ArrayList<UnitInfo>();
        operands2.add(ExceptionInstantiation.NewUnitInfo(firstInfo));
        operands2.add(ExceptionInstantiation.NewUnitInfo(secondInfo));

        if (operands2.get(0).BaseTenExponent != operands2.get(1).BaseTenExponent || operands2.get(0).Prefix.getFactor() != operands2.get(1).Prefix.getFactor())
        {
            //The addition/subtraction might not be performed right away even with normalised values.
            //For example: 5 and 6 from 5*10^2 and 6*10^7 cannot be added right away.
            ArrayList<UnitInfo> operandArgs = new ArrayList<UnitInfo>();
            operandArgs.add(NormaliseUnitInfo(operands2.get(0)));
            operandArgs.add(NormaliseUnitInfo(operands2.get(1)));

            operands2 = AdaptNormalisedValuesForAddition
            (
                operandArgs, operation
            );
        }

        return operands2;
    }

    static ArrayList<UnitInfo> AdaptNormalisedValuesForAddition(ArrayList<UnitInfo> unitInfos2, Operations operation)
    {
        if (unitInfos2.get(0).BaseTenExponent == unitInfos2.get(1).BaseTenExponent)
        {
            //Having the same BaseTenExponent values means that the given operation can be performed right away.
            return unitInfos2;
        }

        //bigSmallI[0] indexes the operand with the bigger BaseTenExponent; bigSmallI[1] the other one.
        int[] bigSmallI =
        (
            unitInfos2.get(0).BaseTenExponent > unitInfos2.get(1).BaseTenExponent ?
            new int[] { 0, 1 } : new int[] { 1, 0 }
        );

        //Only the variable with the bigger value is modified. For example: 5*10^5 & 3*10^3 is converted
        //into 500*10^3 & 3*10^3 in order to allow the addition 500 + 3.
        UnitInfo big2 = AdaptBiggerAdditionOperand(unitInfos2, bigSmallI, operation);
        if (big2.Error.getType() != ErrorTypes.None)
        {
            return TooBigGapAddition(unitInfos2, bigSmallI, operation);
        }

        unitInfos2.set
        (
            bigSmallI[0], ExceptionInstantiation.NewUnitInfo
            (
                unitInfos2.get(bigSmallI[0]), big2.Value, unitInfos2.get(bigSmallI[1]).BaseTenExponent
            )
        );

        return unitInfos2;
    }

    //When adding/subtracting two numbers whose gap is bigger than the maximum double range, there
    //is no need to perform any operation (i.e., no change will be observed because of being outside
    //the maximum supported precision). This method takes care of these cases and returns the expected
    //output (i.e., biggest value).
    static ArrayList<UnitInfo> TooBigGapAddition(ArrayList<UnitInfo> unitInfos2, int[] bigSmallI, Operations operation)
    {
        //First operand (i.e., the one whose information defines the operation) together with the
        //numeric information (i.e., just Value and BaseTenExponent because both are normalised)
        //which is associated with the biggest one.
        ArrayList<UnitInfo> outInfos = new ArrayList<UnitInfo>();
        outInfos.add
        (
            ExceptionInstantiation.NewUnitInfo
            (
                unitInfos2.get(0), unitInfos2.get(bigSmallI[0]).Value,
                unitInfos2.get(bigSmallI[0]).BaseTenExponent
            )
        );

        if (operation == Operations.Subtraction && bigSmallI[0] == 1)
        {
            //Subtracting a much bigger second operand flips the sign of the outcome.
            outInfos.get(0).Value = -1.0 * outInfos.get(0).Value;
        }

        if (outInfos.get(0).Unit == Units.Unitless)
        {
            outInfos.get(0).Unit = unitInfos2.get(bigSmallI[1]).Unit;
        }

        return outInfos;
    }

    static UnitInfo AdaptBiggerAdditionOperand(ArrayList<UnitInfo> unitInfos2, int[] bigSmallI, Operations operation)
    {
        int gapExponent = unitInfos2.get(bigSmallI[0]).BaseTenExponent - unitInfos2.get(bigSmallI[1]).BaseTenExponent;

        if (gapExponent >= 27)
        {
            //The difference between both inputs is bigger than (or, at least, very close to) the maximum double value/precision;
            //what makes this situation calculation unworthy and the first operand to be returned as the result.
            //Note that the error below these lines is just an easy way to tell the calling function about this eventuality.
            return ExceptionInstantiation.NewUnitInfo(unitInfos2.get(0), ErrorTypes.InvalidOperation);
        }

        //PerformManagedOperationValues is used to make sure that the resulting numeric information is stored
        //in Value (if possible).
        UnitInfo big2 = PerformManagedOperationValues
        (
            RaiseToIntegerExponent(10.0, gapExponent), unitInfos2.get(bigSmallI[0]).Value,
            Operations.Multiplication
        );

        //Rewritten from a ternary that re-assigned isWrong inside its own initialiser
        //(boolean isWrong = cond ? isWrong = true : ...); same outcome, plainly stated.
        boolean isWrong;
        if (big2.Error.getType() != ErrorTypes.None || big2.BaseTenExponent != 0)
        {
            //The value of the bigger input times 10^(gap between BaseTenExponent of inputs) is too big.
            isWrong = true;
        }
        else
        {
            //Overflow-check very unlikely to trigger an error. In fact, with properly normalised variables,
            //triggering an error would be plainly impossible.
            isWrong = AreAdditionFinalValuesWrong
            (
                unitInfos2.get(0).Value, unitInfos2.get(1).Value, operation
            );
        }

        return
        (
            isWrong ?
            ExceptionInstantiation.NewUnitInfo(unitInfos2.get(0), ErrorTypes.InvalidOperation) :
            ExceptionInstantiation.NewUnitInfo(unitInfos2.get(bigSmallI[0]), big2.Value)
        );
    }

    //Performs the final addition/subtraction and reports whether it failed.
    static boolean AreAdditionFinalValuesWrong(double val1, double val2, Operations operation)
    {
        boolean isWrong = false;

        try
        {
            val1 = val1 + val2 *
            (
                operation == Operations.Addition ? 1 : -1
            );
        }
        catch (Exception e) { isWrong = true; }
        //NOTE(review): Java double arithmetic never throws (overflow yields infinity), so the
        //catch above looks unreachable in practice; kept for parity with the original behaviour.

        return isWrong;
    }

    static UnitInfo PerformManagedOperationMultiplication(UnitInfo firstInfo, UnitInfo secondInfo, Operations operation)
    {
        //Multiplication/division only requires both operands to be normalised beforehand.
        ArrayList<UnitInfo> normalisedInfos = new ArrayList<UnitInfo>();
        normalisedInfos.add(NormaliseUnitInfo(firstInfo));
        normalisedInfos.add(NormaliseUnitInfo(secondInfo));

        return PerformManagedOperationNormalisedValues
        (
            firstInfo, normalisedInfos, operation
        );
    }

    //Convenience overload: base supplied as a plain double.
    public static UnitInfo RaiseToIntegerExponent(double baseValue, int exponent)
    {
        return RaiseToIntegerExponent(ExceptionInstantiation.NewUnitInfo(baseValue), exponent);
    }

    //Raises baseInfo to the given (positive or negative) integer exponent by repeated managed
    //multiplications; a negative exponent finishes with a managed 1/x division.
    public static UnitInfo RaiseToIntegerExponent(UnitInfo baseInfo, int exponent)
    {
        if (exponent <= 1 && exponent >= 0)
        {
            //NOTE(review): for exponents 0/1 the input instance itself is (possibly modified and)
            //returned rather than a copy — confirm callers rely on this before changing it.
            baseInfo.Value = (exponent == 0 ? 1.0 : baseInfo.Value);
            return baseInfo;
        }

        UnitInfo outInfo = ExceptionInstantiation.NewUnitInfo(baseInfo);

        for (int i = 1; i < Math.abs(exponent); i++)
        {
            outInfo = PerformManagedOperationValues
            (
                outInfo, baseInfo, Operations.Multiplication
            );
            if (outInfo.Error.getType() != ErrorTypes.None) return outInfo;
        }

        return
        (
            exponent < 0 ?
            PerformManagedOperationValues(ExceptionInstantiation.NewUnitInfo(1.0), outInfo, Operations.Division) :
            outInfo
        );
    }

    static UnitInfo PerformManagedOperationNormalisedValues(UnitInfo outInfo, ArrayList<UnitInfo> normalisedInfos, Operations operation)
    {
        return
        (
            normalisedInfos.size() == 1 ?
            //There is just one operand when the difference between both of them is too big.
            normalisedInfos.get(0) :
            PerformManagedOperationTwoOperands(outInfo, normalisedInfos, operation)
        );
    }

    static UnitInfo PerformManagedOperationTwoOperands(UnitInfo outInfo, ArrayList<UnitInfo> normalisedInfos, Operations operation)
    {
        UnitInfo outInfoNormalised = PerformManagedOperationValues
        (
            normalisedInfos.get(0), normalisedInfos.get(1), operation
        );

        //NOTE(review): this inspects outInfo.Error rather than outInfoNormalised.Error; it looks
        //like the freshly-calculated value's error was meant — confirm before changing.
        if (outInfo.Error.getType() != ErrorTypes.None)
        {
            return ExceptionInstantiation.NewUnitInfo(outInfo, ErrorTypes.NumericError);
        }

        outInfo.BaseTenExponent = outInfoNormalised.BaseTenExponent;
        outInfo.Value = outInfoNormalised.Value;
        //Normalised means no prefixes.
        outInfo.Prefix = new Prefix(outInfo.Prefix.getPrefixUsage());

        return outInfo;
    }

    //Convenience overload: both operands supplied as plain doubles.
    public static UnitInfo PerformManagedOperationValues(double firstValue, double secondValue, Operations operation)
    {
        return PerformManagedOperationValues
        (
            ExceptionInstantiation.NewUnitInfo(firstValue), ExceptionInstantiation.NewUnitInfo(secondValue), operation
        );
    }

    //Convenience overload: second operand supplied as a plain double.
    static UnitInfo PerformManagedOperationValues(UnitInfo firstInfo, double secondValue, Operations operation)
    {
        return PerformManagedOperationValues
        (
            firstInfo, ExceptionInstantiation.NewUnitInfo(secondValue), operation
        );
    }

    //This method might be used to perform full operations (not just being the last calculation step) instead
    //of the default one (PerformManagedOperationUnits) for simple cases. That is: ones not dealing with the
    //complex numeric reality (Value, Prefix and BaseTenExponent) which makes a pre-analysis required.
    //Note that, unlike what happens with PerformManagedOperationUnits, the outputs of this method aren't
    //normalised (= primarily stored under Value), which is useful in certain contexts.
    //NOTE: this function assumes that both inputs are normalised, which means that no prefix information is expected.
    //It might also be used with non-normalised inputs, but their prefix information would be plainly ignored.
    public static UnitInfo PerformManagedOperationValues(UnitInfo firstInfo, UnitInfo secondInfo, Operations operation)
    {
        if (firstInfo.Value == 0.0 || secondInfo.Value == 0.0)
        {
            if (operation == Operations.Multiplication || operation == Operations.Division)
            {
                //Dividing by zero scenarios are taken into account somewhere else.
                return ExceptionInstantiation.NewUnitInfo(firstInfo, 0.0);
            }
        }

        UnitInfo outInfo = ExceptionInstantiation.NewUnitInfo(firstInfo);
        //Pristine copies of the inputs, kept in case the error-managing path below needs them.
        UnitInfo firstInfo0 = ExceptionInstantiation.NewUnitInfo(firstInfo);
        UnitInfo secondInfo0 = ExceptionInstantiation.NewUnitInfo(secondInfo);
        boolean isWrong = false;

        try
        {
            if (operation == Operations.Addition)
            {
                outInfo.Value += secondInfo0.Value;
            }
            else if (operation == Operations.Subtraction)
            {
                //secondInfo0 is used consistently here (the original mixed secondInfo and
                //secondInfo0; both are identical unmodified copies, so behaviour is unchanged).
                outInfo.Value -= secondInfo0.Value;
            }
            else
            {
                //The reason for checking whether BaseTenExponent is inside/outside the int range before performing
                //the operation (rather than going ahead and eventually catching the resulting exception) isn't just
                //being quicker, but also the only option in many situations. Note that an addition/subtraction between
                //two int variables whose result is outside the int range might not trigger an exception (+ random
                //negative value as output).
                if
                (
                    VaryBaseTenExponent
                    (
                        outInfo, secondInfo0.BaseTenExponent, operation == Operations.Division
                    )
                    .Error.getType() != ErrorTypes.None)
                {
                    return new UnitInfo(outInfo, ErrorTypes.InvalidOperation);
                }

                if (operation == Operations.Multiplication)
                {
                    outInfo.Value *= secondInfo0.Value;
                    outInfo.BaseTenExponent += secondInfo0.BaseTenExponent;
                }
                else if (operation == Operations.Division)
                {
                    if (secondInfo0.Value == 0.0)
                    {
                        UnitInfo output = new UnitInfo(outInfo);
                        output.Error = new ErrorInfo(ErrorTypes.NumericError);
                        return output;
                    }
                    outInfo.Value /= secondInfo0.Value;
                    outInfo.BaseTenExponent -= secondInfo0.BaseTenExponent;
                }
            }
        }
        catch (Exception e) { isWrong = true; }

        return
        (
            //An error might not be triggered despite of dealing with numbers outside double precision.
            //For example: 0.00000000000000000001m * 0.0000000000000000000001m can output 0.0 without triggering an error.
            isWrong || ((operation == Operations.Multiplication || operation == Operations.Division) && outInfo.Value == 0.0) ?
            OperationValuesManageError(firstInfo0, secondInfo0, operation) : outInfo
        );
    }

    //Recalculates multiplication/division outputs whose Value overflowed (or collapsed to 0.0)
    //by moving part of the numeric information into BaseTenExponent.
    static UnitInfo OperationValuesManageError(UnitInfo outInfo, UnitInfo secondInfo, Operations operation)
    {
        if (operation != Operations.Multiplication && operation != Operations.Division)
        {
            //This condition should never be true on account of the fact that the pre-modifications performed before
            //adding/subtracting should avoid erroneous situations.
            return ExceptionInstantiation.NewUnitInfo(outInfo, ErrorTypes.InvalidOperation);
        }

        UnitInfo secondInfo2 = ConvertValueToBaseTen(secondInfo.Value);
        outInfo = VaryBaseTenExponent(outInfo, secondInfo2.BaseTenExponent, operation == Operations.Division);
        if (Math.abs(secondInfo2.Value) == 1.0 || outInfo.Error.getType() != ErrorTypes.None) return outInfo;

        try
        {
            outInfo = PerformManagedOperationUnits
            (
                outInfo, secondInfo2.Value, operation
            );
        }
        catch (Exception e)
        {
            //Very unlikely scenario on account of the fact that Math.abs(secondInfo2.Value)
            //lies within the 0.1-10.0 range.
            UnitInfo arg1 = ExceptionInstantiation.NewUnitInfo(outInfo);
            arg1.Value = secondInfo2.Value;
            arg1.BaseTenExponent = 0;
            UnitInfo arg2 = ExceptionInstantiation.NewUnitInfo();
            arg2.Value = outInfo.Value;
            arg2.BaseTenExponent = outInfo.BaseTenExponent;
            outInfo = OperationValuesManageError(arg1, arg2, operation);
        }

        return outInfo;
    }

    //Expresses the given value as a small Value together with the matching BaseTenExponent.
    static UnitInfo ConvertValueToBaseTen(double value)
    {
        value = Math.abs(value);

        return FromValueToBaseTenExponent
        (
            ExceptionInstantiation.NewUnitInfo(value), Math.abs(value), false
        );
    }

    //Moves as much of BaseTenExponent as possible back into Value without leaving the
    //double-supported range (OperationsOther.MinValue/MaxValue).
    public static UnitInfo ConvertBaseTenToValue(UnitInfo unitInfo)
    {
        if (unitInfo.BaseTenExponent == 0) return unitInfo;

        UnitInfo outInfo = ExceptionInstantiation.NewUnitInfo(unitInfo);
        boolean decrease = unitInfo.BaseTenExponent > 0;
        int sign = (int)Math.signum(outInfo.Value);
        double absValue = Math.abs(outInfo.Value);

        while (outInfo.BaseTenExponent != 0)
        {
            if (decrease)
            {
                if (absValue >= OperationsOther.MaxValue / 10.0) break;
                absValue *= 10.0;
                outInfo.BaseTenExponent -= 1;
            }
            else
            {
                if (absValue <= OperationsOther.MinValue * 10.0) break;
                absValue /= 10.0;
                outInfo.BaseTenExponent += 1;
            }
        }

        outInfo.Value = sign * absValue;

        return outInfo;
    }

    //Converts the input into its normalised version: no prefix, with the prefix factor and the
    //bulk of Value folded into BaseTenExponent.
    public static UnitInfo NormaliseUnitInfo(UnitInfo unitInfo)
    {
        if (unitInfo.Value == 0 && unitInfo.Prefix.getFactor() == 1.0)
        {
            return unitInfo;
        }

        UnitInfo outInfo = ExceptionInstantiation.NewUnitInfo(unitInfo);

        if (outInfo.Prefix.getFactor() != 1)
        {
            //The prefix factor is folded into Value/BaseTenExponent and the prefix removed.
            outInfo = FromValueToBaseTenExponent
            (
                outInfo, outInfo.Prefix.getFactor(), true
            );
            outInfo.Prefix = new Prefix(outInfo.Prefix.getPrefixUsage());
        }

        if (outInfo.Value == 0.0) return outInfo;

        outInfo = FromValueToBaseTenExponent
        (
            outInfo, outInfo.Value, false
        );

        return outInfo;
    }

    //Repeatedly multiplies/divides value by 10 until its absolute value lies within the
    //0.1-10.0 range, tracking the shifted powers of ten in outInfo.BaseTenExponent.
    static UnitInfo FromValueToBaseTenExponent(UnitInfo outInfo, double value, boolean isPrefix)
    {
        if (value == 0.0) return outInfo;

        double valueAbs = Math.abs(value);
        boolean decrease = (valueAbs > 1.0);
        if (!isPrefix)
        {
            outInfo.Value = outInfo.Value / valueAbs;
        }

        while (valueAbs != 1.0)
        {
            if ((valueAbs < 10.0 && valueAbs > 1.0) || (valueAbs > 0.1 && valueAbs < 1.0))
            {
                if (!isPrefix) outInfo.Value = value;
                else
                {
                    outInfo = PerformManagedOperationValues
                    (
                        outInfo, value, Operations.Multiplication
                    );
                }

                return outInfo;
            }

            if (decrease)
            {
                value /= 10.0;
                outInfo.BaseTenExponent += 1;
            }
            else
            {
                value *= 10.0;
                outInfo.BaseTenExponent -= 1;
            }

            valueAbs = Math.abs(value);
        }

        return outInfo;
    }

    public static UnitInfo VaryBaseTenExponent(UnitInfo info, int baseTenIncrease)
    {
        return VaryBaseTenExponent(info, baseTenIncrease, false);
    }

    //Method used to vary BaseTenExponent without provoking unhandled exceptions (i.e., bigger than Integer.MAX_VALUE).
    static UnitInfo VaryBaseTenExponent(UnitInfo info, int baseTenIncrease, boolean isDivision)
    {
        //Converting a negative value into positive might provoke an overflow error for the int type
        //(e.g., Math.abs(Integer.MIN_VALUE)). Converting both variables to long is a quick and effective
        //way to avoid this problem.
        long val1 = info.BaseTenExponent;
        long val2 = baseTenIncrease;
        if (isDivision)
        {
            val2 *= -1;
        }

        UnitInfo outInfo;
        if ((val2 > 0 && val1 > Integer.MAX_VALUE - val2) || (val2 < 0 && val1 < Integer.MIN_VALUE - val2))
        {
            //The resulting exponent would fall outside the int range.
            outInfo = ExceptionInstantiation.NewUnitInfo(info, ErrorTypes.NumericError);
        }
        else
        {
            outInfo = ExceptionInstantiation.NewUnitInfo(info);
            outInfo.BaseTenExponent = (int)(val1 + val2);
        }

        return outInfo;
    }
}
| |
/*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.compiler.integrationtests;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.drools.core.base.ClassObjectType;
import org.drools.kiesession.rulebase.InternalKnowledgeBase;
import org.drools.core.reteoo.AlphaNode;
import org.drools.core.reteoo.CompositeObjectSinkAdapter;
import org.drools.core.reteoo.EntryPointNode;
import org.drools.core.reteoo.ObjectTypeNode;
import org.drools.core.spi.AlphaNodeFieldConstraint;
import org.drools.modelcompiler.constraints.LambdaConstraint;
import org.drools.mvel.MVELConstraint;
import org.drools.testcoverage.common.model.FactWithList;
import org.drools.testcoverage.common.model.Person;
import org.drools.testcoverage.common.util.KieBaseTestConfiguration;
import org.drools.testcoverage.common.util.KieBaseUtil;
import org.drools.testcoverage.common.util.TestParametersUtil;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.kie.api.KieBase;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.rule.Agenda;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
@RunWith(Parameterized.class)
public class SharingTest {
private final KieBaseTestConfiguration kieBaseTestConfiguration;
// Stores the parameterized KieBase configuration (cloud / executable model, etc.) under test.
public SharingTest(final KieBaseTestConfiguration kieBaseTestConfiguration) {
this.kieBaseTestConfiguration = kieBaseTestConfiguration;
}
// Supplies every cloud KieBase configuration (the 'true' argument includes executable model).
@Parameterized.Parameters(name = "KieBase type={0}")
public static Collection<Object[]> getParameters() {
return TestParametersUtil.getKieBaseCloudConfigurations(true);
}
// Fixture exposing static members with different constancy guarantees, used to check which
// constraints the engine may safely treat as constant when deciding alpha node sharing.
public static class TestStaticUtils {
public static final NestedObj nestedObj = new NestedObj();
public static final int FINAL_1 = 1; // compile-time constant
public static int nonFinal1 = 1; // mutable, so not a constant from the engine's viewpoint
public static int return1() {
return 1;
}
}
// Holder whose constant is only reachable through another static field (nested access).
public static class NestedObj {
public static final int FINAL_1 = 1;
}
// Simple enum used to verify alpha node sharing of enum-literal constraints.
public static enum TestEnum {
AAA,
BBB,
CCC;
}
@Test
public void testDontShareAlphaWithStaticMethod() {
// DROOLS-6418
// A static method call may return a different value on each evaluation, so the engine must
// not treat "value == TestStaticUtils.return1()" as equivalent to the literal "value == 1":
// R1 and R2 keep separate alpha nodes (3 sinks) and R2's constraint is not hash-indexable.
final String drl1 = "package c;\n" +
"import " + TestObject.class.getCanonicalName() + "\n" +
"import " + TestStaticUtils.class.getCanonicalName() + "\n" +
"rule R1 when\n" +
"  TestObject(value == 1)\n" +
"then\n" +
"end\n" +
"rule R2 when\n" +
"  TestObject(value == TestStaticUtils.return1())\n" + // return1() doesn't guarantee that it always returns 1
"then\n" +
"end\n" +
"rule R3 when\n" +
"  TestObject(value == 0 )\n" +
"then\n" +
"end\n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("sharing-test", kieBaseTestConfiguration, drl1);
ObjectTypeNode otn = getObjectTypeNode(kbase, TestObject.class);
assertSinksSize(otn, 3); // Not shared
assertHashableSinksSize(otn, 2);
assertNonHashableConstraint(otn, "value == TestStaticUtils.return1()");
final KieSession kieSession = kbase.newKieSession();
try {
kieSession.insert(new TestObject(1));
assertEquals(2, kieSession.fireAllRules());
} finally {
kieSession.dispose();
}
}
// Looks up the ObjectTypeNode registered for factClass in the KieBase's (single) entry point.
private ObjectTypeNode getObjectTypeNode(KieBase kbase, Class<?> factClass) {
    final InternalKnowledgeBase internalKbase = (InternalKnowledgeBase) kbase;
    final EntryPointNode entryPoint =
            internalKbase.getRete().getEntryPointNodes().values().iterator().next();
    return entryPoint.getObjectTypeNodes().get(new ClassObjectType(factClass));
}
// Asserts that the ObjectTypeNode propagates to exactly the expected number of sinks.
private void assertSinksSize(ObjectTypeNode otn, int expected) {
    final int actual = otn.getSinks().length;
    assertEquals(expected, actual);
}
// Asserts how many hash-indexable sinks hang off the node; expecting 0 means none were created.
private void assertHashableSinksSize(ObjectTypeNode otn, int expected) {
    final CompositeObjectSinkAdapter adapter =
            (CompositeObjectSinkAdapter) otn.getObjectSinkPropagator();
    if (expected != 0) {
        assertEquals(expected, adapter.getHashableSinks().size());
        return;
    }
    assertNull(adapter.getHashableSinks());
}
// Asserts the textual form of the first non-hashable alpha constraint, covering both the
// MVEL-based and the executable-model (lambda) constraint representations.
private void assertNonHashableConstraint(ObjectTypeNode otn, String expected) {
    final CompositeObjectSinkAdapter adapter =
            (CompositeObjectSinkAdapter) otn.getObjectSinkPropagator();
    final AlphaNode alpha = (AlphaNode) adapter.getOtherSinks().get(0);
    final AlphaNodeFieldConstraint constraint = alpha.getConstraint();
    if (constraint instanceof LambdaConstraint) {
        assertEquals(expected, ((LambdaConstraint) constraint).getPredicateInformation().getStringConstraint());
    } else if (constraint instanceof MVELConstraint) {
        assertEquals(expected, ((MVELConstraint) constraint).getExpression());
    }
}
@Test
public void testDontShareAlphaWithNonFinalField() {
// DROOLS-6418
// A non-final static field can change at any time, so "value == TestStaticUtils.nonFinal1"
// must not be shared with the literal "value == 1": 3 separate sinks, R2 not hash-indexable.
final String drl = "package com.example;\n" +
"import " + TestObject.class.getCanonicalName() + "\n" +
"import " + TestStaticUtils.class.getCanonicalName() + "\n" +
"rule R1 when\n" +
"  TestObject(value == 1)\n" +
"then\n" +
"end\n" +
"rule R2 when\n" +
"  TestObject(value == TestStaticUtils.nonFinal1)\n" + // nonFinal1 doesn't guarantee that it always returns 1
"then\n" +
"end\n" +
"rule R3 when\n" +
"  TestObject(value == 0 )\n" +
"then\n" +
"end\n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("sharing-test", kieBaseTestConfiguration, drl);
ObjectTypeNode otn = getObjectTypeNode(kbase, TestObject.class);
assertSinksSize(otn, 3); // Not shared
assertHashableSinksSize(otn, 2);
assertNonHashableConstraint(otn, "value == TestStaticUtils.nonFinal1");
final KieSession kieSession = kbase.newKieSession();
try {
kieSession.insert(new TestObject(1));
assertEquals(2, kieSession.fireAllRules());
} finally {
kieSession.dispose();
}
}
@Test
public void testShareAlphaWithFinalField() {
// DROOLS-6418
// A static final (compile-time constant) field is safe to fold, so standard DRL shares the
// node with the literal "value == 1"; the executable model currently does not (DROOLS-6485).
final String drl = "package com.example;\n" +
"import " + TestObject.class.getCanonicalName() + "\n" +
"import " + TestStaticUtils.class.getCanonicalName() + "\n" +
"rule R1 when\n" +
"  TestObject(value == 1)\n" +
"then\n" +
"end\n" +
"rule R2 when\n" +
"  TestObject(value == TestStaticUtils.FINAL_1)\n" + // FINAL_1 always returns 1
"then\n" +
"end\n" +
"rule R3 when\n" +
"  TestObject(value == 0 )\n" +
"then\n" +
"end\n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("sharing-test", kieBaseTestConfiguration, drl);
ObjectTypeNode otn = getObjectTypeNode(kbase, TestObject.class);
if (kieBaseTestConfiguration.isExecutableModel()) {
assertSinksSize(otn, 3); // exec-model doesn't share the final field nodes. For improvement, see DROOLS-6485
assertNonHashableConstraint(otn, "value == TestStaticUtils.FINAL_1");
} else {
assertSinksSize(otn, 2); // "value == 1" and "value == TestStaticUtils.FINAL_1" are shared
}
assertHashableSinksSize(otn, 2);
final KieSession kieSession = kbase.newKieSession();
try {
kieSession.insert(new TestObject(1));
assertEquals(2, kieSession.fireAllRules());
} finally {
kieSession.dispose();
}
}
@Test
public void testShareAlphaWithNestedFinalField() {
// DROOLS-6418
// A constant reached through another static field (nestedObj.FINAL_1) is not recognised as
// constant by either backend, so no sharing happens in any configuration.
final String drl = "package com.example;\n" +
"import " + TestObject.class.getCanonicalName() + "\n" +
"import " + TestStaticUtils.class.getCanonicalName() + "\n" +
"rule R1 when\n" +
"  TestObject(value == 1)\n" +
"then\n" +
"end\n" +
"rule R2 when\n" +
"  TestObject(value == TestStaticUtils.nestedObj.FINAL_1)\n" + // FINAL_1 always returns 1
"then\n" +
"end\n" +
"rule R3 when\n" +
"  TestObject(value == 0 )\n" +
"then\n" +
"end\n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("sharing-test", kieBaseTestConfiguration, drl);
ObjectTypeNode otn = getObjectTypeNode(kbase, TestObject.class);
// standard-drl cannot analyze nested final field. So not shared
// exec-model doesn't share the final field nodes. For improvement, see DROOLS-6485
assertSinksSize(otn, 3);
assertHashableSinksSize(otn, 2);
assertNonHashableConstraint(otn, "value == TestStaticUtils.nestedObj.FINAL_1");
final KieSession kieSession = kbase.newKieSession();
try {
kieSession.insert(new TestObject(1));
assertEquals(2, kieSession.fireAllRules());
} finally {
kieSession.dispose();
}
}
@Test
public void testShareAlphaWithEnum() {
// DROOLS-6418
final String drl = "package com.example;\n" +
"import " + TestObject.class.getCanonicalName() + "\n" +
"import " + TestEnum.class.getCanonicalName() + "\n" +
"rule R1 when\n" +
" TestObject(testEnum == TestEnum.AAA)\n" +
"then\n" +
"end\n" +
"rule R2 when\n" +
" TestObject(testEnum == TestEnum.AAA)\n" +
"then\n" +
"end\n" +
"rule R3 when\n" +
" TestObject(testEnum == TestEnum.BBB)\n" +
"then\n" +
"end\n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("sharing-test", kieBaseTestConfiguration, drl);
ObjectTypeNode otn = getObjectTypeNode(kbase, TestObject.class);
assertSinksSize(otn, 2); // shared
assertHashableSinksSize(otn, 0); // not hash indexable
final KieSession kieSession = kbase.newKieSession();
try {
kieSession.insert(new TestObject(TestEnum.AAA));
assertEquals(2, kieSession.fireAllRules());
} finally {
kieSession.dispose();
}
}
@Test
public void testDontShareAlphaWithBigDecimalConstructor() {
// DROOLS-6418
final String drl = "package com.example;\n" +
"import " + Person.class.getCanonicalName() + "\n" +
"import " + BigDecimal.class.getCanonicalName() + "\n" +
"rule R1 when\n" +
" Person(salary == 1)\n" +
"then\n" +
"end\n" +
"rule R2 when\n" +
" Person(salary == new BigDecimal(\"1\"))\n" + // known constructor... always returns 1.
"then\n" +
"end\n" +
"rule R3 when\n" +
" Person(salary == 0)\n" +
"then\n" +
"end\n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("sharing-test", kieBaseTestConfiguration, drl);
ObjectTypeNode otn = getObjectTypeNode(kbase, Person.class);
assertSinksSize(otn, 3); // Not shared. For improvement, see DROOLS-6485
assertHashableSinksSize(otn, 2);
assertNonHashableConstraint(otn, "salary == new BigDecimal(\"1\")");
final KieSession kieSession = kbase.newKieSession();
try {
kieSession.insert(new Person("John", 20, new BigDecimal("1")));
assertEquals(2, kieSession.fireAllRules());
} finally {
kieSession.dispose();
}
}
@Test
public void testShouldAlphaShareNotEqualsInDifferentPackages() {
// DROOLS-1404
final String drl1 = "package c;\n" +
"import " + TestObject.class.getCanonicalName() + "\n" +
"rule fileArule1 when\n" +
" TestObject(value >= 1 )\n" +
"then\n" +
"end\n" +
"";
final String drl2 = "package iTzXzx;\n" + // <<- keep the different package
"import " + TestObject.class.getCanonicalName() + "\n" +
"rule fileBrule1 when\n" +
" TestObject(value >= 1 )\n" +
"then\n" +
"end\n" +
"rule fileBrule2 when\n" + // <<- keep this rule
" TestObject(value >= 2 )\n" +
"then\n" +
"end\n" +
"";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("sharing-test", kieBaseTestConfiguration, drl1, drl2);
ObjectTypeNode otn = getObjectTypeNode(kbase, TestObject.class);
assertSinksSize(otn, 2); // shared
final KieSession kieSession = kbase.newKieSession();
try {
kieSession.insert(new TestObject(1));
assertEquals(2, kieSession.fireAllRules());
} finally {
kieSession.dispose();
}
}
@Test
public void testShouldAlphaShareNotEqualsInDifferentPackages2() {
// DROOLS-1404
final String drl1 = "package c;\n" +
"import " + FactWithList.class.getCanonicalName() + "\n" +
"\n" +
"rule fileArule1 when\n" +
" FactWithList(items contains \"test\")\n" +
"then\n" +
"end\n" +
"";
final String drl2 = "package iTzXzx;\n" + // <<- keep the different package
"import " + FactWithList.class.getCanonicalName() + "\n" +
"rule fileBrule1 when\n" +
" FactWithList(items contains \"test\")\n" +
"then\n" +
"end\n" +
"rule fileBrule2 when\n" + // <<- keep this rule
" FactWithList(items contains \"testtest\")\n" +
"then\n" +
"end\n" +
"";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("sharing-test", kieBaseTestConfiguration, drl1, drl2);
ObjectTypeNode otn = getObjectTypeNode(kbase, FactWithList.class);
assertSinksSize(otn, 2); // shared
final KieSession kieSession = kbase.newKieSession();
try {
final FactWithList factWithList = new FactWithList("test");
kieSession.insert(factWithList);
assertEquals(2, kieSession.fireAllRules());
} finally {
kieSession.dispose();
}
}
@Test
public void testSubnetworkSharing() {
final String drl =
"import " + A.class.getCanonicalName() + "\n" +
"import " + B.class.getCanonicalName() + "\n" +
"global java.util.List list" +
"\n" +
"rule R1 agenda-group \"G2\" when\n" +
" Number( intValue < 1 ) from accumulate (\n" +
" A( $id : id )\n" +
" and $b : B( parentId == $id )\n" +
" ;count($b))\n" +
"then\n" +
" list.add(\"R1\");\n" +
"end\n" +
"\n" +
"rule R2 agenda-group \"G1\" when\n" +
" Number( intValue < 1 ) from accumulate (\n" +
" A( $id : id )\n" +
" and $b : B( parentId == $id )\n" +
"\n" +
" ;count($b))\n" +
"then\n" +
" list.add(\"R2\");\n" +
"end\n" +
"\n" +
"rule R3 agenda-group \"G1\" no-loop when\n" +
" $a : A( $id : id )\n" +
"then\n" +
" modify($a) { setId($id + 1) };\n" +
"end\n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("sharing-test", kieBaseTestConfiguration, drl);
ObjectTypeNode otnA = getObjectTypeNode(kbase, A.class);
assertSinksSize(otnA, 2);
ObjectTypeNode otnB = getObjectTypeNode(kbase, B.class);
assertSinksSize(otnB, 1);
final KieSession kieSession = kbase.newKieSession();
try {
final List<String> list = new ArrayList<>();
kieSession.setGlobal("list", list);
kieSession.insert(new A(1));
kieSession.insert(new B(1));
final Agenda agenda = kieSession.getAgenda();
agenda.getAgendaGroup("G2").setFocus();
agenda.getAgendaGroup("G1").setFocus();
kieSession.fireAllRules();
assertEquals(2, list.size());
assertTrue(list.contains("R1"));
assertTrue(list.contains("R2"));
} finally {
kieSession.dispose();
}
}
public static class A {
private int id;
public A(final int id) {
this.id = id;
}
public int getId() {
return id;
}
public void setId(final int id) {
this.id = id;
}
}
public static class B {
private final int parentId;
public B(final int parentId) {
this.parentId = parentId;
}
public int getParentId() {
return parentId;
}
}
public static class TestObject {
private Integer value = -1;
private TestEnum testEnum = TestEnum.AAA;
public TestObject(Integer value) {
this.value = value;
}
public TestObject(TestEnum testEnum) {
this.testEnum = testEnum;
}
public Integer getValue() {
return value;
}
public TestEnum getTestEnum() {
return testEnum;
}
}
}
| |
package com.planet_ink.coffee_mud.Abilities.Druid;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2003-2016 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Chant_HowlersMoon raises a "howler's moon" over an outdoor room. While the
 * effect sits on the Room, ferocious wolves periodically walk in from a chosen
 * neighboring outdoor room and may follow or attack the invoker. The same
 * ability instance placed on a summoned wolf MOB tags it for cleanup
 * (wander away / destroy) when the effect ends.
 */
public class Chant_HowlersMoon extends Chant
{
	@Override public String ID() { return "Chant_HowlersMoon"; }
	private final static String localizedName = CMLib.lang().L("Howlers Moon");
	@Override public String name() { return localizedName; }
	private final static String localizedStaticDisplay = CMLib.lang().L("(Howlers Moon)");
	@Override public String displayText() { return localizedStaticDisplay; }
	@Override public int abstractQuality(){ return Ability.QUALITY_MALICIOUS;}
	@Override public int enchantQuality(){return Ability.QUALITY_INDIFFERENT;}
	@Override protected int canAffectCode(){return CAN_MOBS|CAN_ROOMS;}
	@Override protected int canTargetCode(){return 0;}
	@Override public int classificationCode(){return Ability.ACODE_CHANT|Ability.DOMAIN_MOONALTERING;}
	// ticks since the last wolf spawn; tick() only attempts a spawn every 20 ticks
	protected int ticksTicked=0;
	// direction wolves arrive from; -1 until a usable outdoor exit is chosen
	protected int fromDir=-1;
	/**
	 * Undo the effects of this chant. On a Room, announces the moon setting.
	 * On a summoned wolf MOB that is not following anyone, makes it wander
	 * off and then destroys it.
	 */
	@Override
	public void unInvoke()
	{
		// undo the effects of this chant
		if(!(affected instanceof MOB))
		{
			if(affected instanceof Room)
				((Room)affected).showHappens(CMMsg.MSG_OK_VISUAL,L("The howlers moon sets."));
			super.unInvoke();
			return;
		}
		final MOB mob=(MOB)affected;
		if(mob.amFollowing()==null)
			CMLib.tracking().wanderAway(mob,true,false);
		super.unInvoke();
		if((canBeUninvoked())&&(mob.amFollowing()==null))
		{
			mob.tell(L("You are no longer under the howlers moon."));
			// clear the location of a dead wolf before destroy()
			if(mob.amDead())
				mob.setLocation(null);
			mob.destroy();
		}
	}
	/**
	 * Periodic processing. For a Room under the moon: ends the effect when the
	 * moon is no longer visible and, every 20 ticks, tries to spawn one wolf
	 * from the chosen neighboring room, capped by the invoker's level.
	 */
	@Override
	public boolean tick(Tickable ticking, int tickID)
	{
		if(!super.tick(ticking,tickID))
			return false;
		if(affected==null)
			return false;
		if(affected instanceof Room)
		{
			final Room room=(Room)affected;
			// the chant persists only while the moon is visible from this room
			if(!room.getArea().getClimateObj().canSeeTheMoon(room,this))
				unInvoke();
			if((++ticksTicked)<20)
				return true;
			// count wolves already summoned here: monsters carrying this effect
			int numWolfs=0;
			for(int i=0;i<room.numInhabitants();i++)
			{
				final MOB M=room.fetchInhabitant(i);
				if((M!=null)
				&&(M.isMonster())
				&&(M.fetchEffect(ID())!=null))
					numWolfs++;
			}
			// cap: hard limit of 5, plus roughly 1 wolf per 10 effective invoker levels
			if((numWolfs>5)||((invoker()!=null)&&(numWolfs>(invoker().phyStats().level()+(2*getXLEVELLevel(invoker())))/10)))
				return true;
			if(fromDir<0)
			{
				// lazily pick a random outdoor arrival direction (normally set by invoke())
				final Vector<Integer> choices=fillChoices(room);
				if(choices.size()==0)
					return true;
				fromDir=choices.elementAt(CMLib.dice().roll(1,choices.size(),-1)).intValue();
			}
			if(fromDir>=0)
			{
				ticksTicked=0;
				// wolf level scales with the invoker; defaults to the chant's lowest qualifying level
				int level=CMLib.ableMapper().lowestQualifyingLevel(ID())+5;
				if(invoker()!=null)
					level=invoker().phyStats().level()+5+(2*getXLEVELLevel(invoker()));
				final MOB target = determineMonster(invoker(),level);
				final Room newRoom=room.getRoomInDir(fromDir);
				final int opDir=Directions.getOpDirectionCode(fromDir);
				// spawn the wolf in the neighboring room and walk it into this one
				target.bringToLife(newRoom,true);
				CMLib.beanCounter().clearZeroMoney(target,null);
				target.location().showOthers(target,null,CMMsg.MSG_OK_ACTION,L("<S-NAME> appears!"));
				newRoom.recoverRoomStats();
				target.setStartRoom(null);
				CMLib.tracking().walk(target,opDir,false,false);
				if(target.location()==room)
				{
					// roughly: 1/3 chance it follows the invoker (attacking if the follow
					// fails), 1/3 chance it attacks the invoker outright, else neutral
					final int d=CMLib.dice().rollPercentage();
					if((d<33)&&(invoker()!=null)&&(invoker().location()==room))
					{
						CMLib.commands().postFollow(target,invoker(),true);
						beneficialAffect(invoker(),target,0,0);
						if(target.amFollowing()!=invoker())
							target.setVictim(invoker());
					}
					else
					if((d>66)&&(invoker()!=null)&&(invoker().location()==room))
						target.setVictim(invoker());
					// tag the wolf with this effect so unInvoke() can clean it up later
					beneficialAffect(target,target,0,Ability.TICKS_ALMOST_FOREVER);
				}
				else
				{
					// the walk into this room failed; discard the unused wolf
					if(target.amDead())
						target.setLocation(null);
					target.destroy();
				}
			}
		}
		return true;
	}
	/**
	 * Collects every direction from R whose destination room is outdoors (and
	 * not in the air) and whose exits are open in both directions.
	 * @param R the room wolves would be summoned into
	 * @return directions usable as a wolf arrival route (possibly empty)
	 */
	protected Vector<Integer> fillChoices(Room R)
	{
		final Vector<Integer> choices=new Vector<Integer>();
		for(int d=Directions.NUM_DIRECTIONS()-1;d>=0;d--)
		{
			final Room room=R.getRoomInDir(d);
			final Exit exit=R.getExitInDir(d);
			final Exit opExit=R.getReverseExit(d);
			if((room!=null)
			&&((room.domainType()&Room.INDOORS)==0)
			&&(room.domainType()!=Room.DOMAIN_OUTDOORS_AIR)
			&&((exit!=null)&&(exit.isOpen()))
			&&(opExit!=null)&&(opExit.isOpen()))
				choices.addElement(Integer.valueOf(d));
		}
		return choices;
	}
	/**
	 * Advises casting AI: indifferent when the moon is hidden, another
	 * moon-altering effect is already active on the room, or there is no
	 * usable outdoor exit; otherwise defers to the default scoring.
	 */
	@Override
	public int castingQuality(MOB mob, Physical target)
	{
		if(mob!=null)
		{
			final Room R=mob.location();
			if(R!=null)
			{
				if(!R.getArea().getClimateObj().canSeeTheMoon(R,null))
					return Ability.QUALITY_INDIFFERENT;
				for(final Enumeration<Ability> a=R.effects();a.hasMoreElements();)
				{
					final Ability A=a.nextElement();
					if((A!=null)
					&&((A.classificationCode()&Ability.ALL_DOMAINS)==Ability.DOMAIN_MOONALTERING))
						return Ability.QUALITY_INDIFFERENT;
				}
				final Vector<Integer> choices=fillChoices(R);
				if(choices.size()==0)
					return Ability.QUALITY_INDIFFERENT;
			}
		}
		return super.castingQuality(mob,target);
	}
	/**
	 * Casts the chant on the caster's current room. Fails up-front when the
	 * moon is not visible, this or another moon-altering effect is already on
	 * the room, the room is indoors, or no suitable neighboring outdoor exit
	 * exists for wolves to arrive through.
	 * @param mob the caster
	 * @param commands command words given with the chant
	 * @param givenTarget optional pre-selected target
	 * @param auto true when cast automatically (bypasses cost/fizzle)
	 * @param asLevel level override, 0 for the caster's own level
	 * @return true if the chant took hold
	 */
	@Override
	public boolean invoke(MOB mob, List<String> commands, Physical givenTarget, boolean auto, int asLevel)
	{
		final Room target=mob.location();
		if(target==null)
			return false;
		if(!target.getArea().getClimateObj().canSeeTheMoon(target,null))
		{
			mob.tell(L("You must be able to see the moon for this magic to work."));
			return false;
		}
		if(target.fetchEffect(ID())!=null)
		{
			mob.tell(L("This place is already under the howler's moon."));
			return false;
		}
		for(final Enumeration<Ability> a=target.effects();a.hasMoreElements();)
		{
			final Ability A=a.nextElement();
			if((A!=null)
			&&((A.classificationCode()&Ability.ALL_DOMAINS)==Ability.DOMAIN_MOONALTERING))
			{
				mob.tell(L("The moon is already under @x1, and can not be changed until this magic is gone.",A.name()));
				return false;
			}
		}
		if((mob.location().domainType()&Room.INDOORS)>0)
		{
			mob.tell(L("You must be outdoors for this chant to work."));
			return false;
		}
		final Vector<Integer> choices=fillChoices(mob.location());
		fromDir=-1;
		if(choices.size()==0)
		{
			mob.tell(L("You must be further outdoors to summon an animal."));
			return false;
		}
		// pre-pick the arrival direction so tick() can start spawning immediately
		fromDir=choices.elementAt(CMLib.dice().roll(1,choices.size(),-1)).intValue();
		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;
		final boolean success=proficiencyCheck(mob,0,auto);
		if(success)
		{
			invoker=mob;
			final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),auto?"":L("^S<S-NAME> chant(s) to the sky.^?"));
			if(mob.location().okMessage(mob,msg))
			{
				mob.location().send(mob,msg);
				// NOTE(review): a msg value > 0 appears to indicate the casting was
				// resisted, so the effect is only applied when value <= 0 — confirm
				if(msg.value()<=0)
				{
					mob.location().showHappens(CMMsg.MSG_OK_VISUAL,L("The Howler's Moon Rises!"));
					ticksTicked=0;
					beneficialAffect(mob,target,asLevel,0);
				}
			}
		}
		else
			return maliciousFizzle(mob,target,L("<S-NAME> chant(s) to the sky, but the magic fades."));
		// return whether it worked
		return success;
	}
	/**
	 * Builds the summoned wolf: a neutral GenMob of the given level with the
	 * Wolf race, corpse-eating and howl-emoting behaviors, and combat stats
	 * scaled through the leveler library. The wolf is not yet placed anywhere.
	 * @param caster the invoker of the chant (not read here; kept for signature parity)
	 * @param level the level the wolf is created at
	 * @return the fully initialized wolf MOB
	 */
	public MOB determineMonster(MOB caster, int level)
	{
		final MOB newMOB=CMClass.getMOB("GenMob");
		newMOB.basePhyStats().setAbility(0);
		newMOB.basePhyStats().setLevel(level);
		CMLib.factions().setAlignment(newMOB,Faction.Align.NEUTRAL);
		newMOB.basePhyStats().setWeight(350);
		newMOB.basePhyStats().setRejuv(PhyStats.NO_REJUV);
		newMOB.baseCharStats().setMyRace(CMClass.getRace("Wolf"));
		newMOB.baseCharStats().getMyRace().startRacing(newMOB,false);
		newMOB.baseCharStats().setStat(CharStats.STAT_GENDER,'M');
		newMOB.recoverPhyStats();
		newMOB.recoverCharStats();
		// combat numbers derive from the level set above
		newMOB.basePhyStats().setArmor(CMLib.leveler().getLevelMOBArmor(newMOB));
		newMOB.basePhyStats().setAttackAdjustment(CMLib.leveler().getLevelAttack(newMOB));
		newMOB.basePhyStats().setDamage(CMLib.leveler().getLevelMOBDamage(newMOB));
		newMOB.basePhyStats().setSpeed(CMLib.leveler().getLevelMOBSpeed(newMOB));
		newMOB.setName(L("a ferocious wolf"));
		newMOB.setDisplayText(L("a huge, ferocious wolf is here"));
		newMOB.setDescription(L("Dark black fur, always standing on end surrounds its muscular body. The eyes are deep red, and his teeth are bared, snarling at you."));
		newMOB.addNonUninvokableEffect(CMClass.getAbility("Prop_ModExperience"));
		Behavior B=CMClass.getBehavior("CorpseEater");
		if(B!=null)
			newMOB.addBehavior(B);
		B=CMClass.getBehavior("Emoter");
		if(B!=null)
		{
			B.setParms("broadcast sound min=3 max=10 chance=80;howls at the moon.");
			newMOB.addBehavior(B);
		}
		newMOB.recoverCharStats();
		newMOB.recoverPhyStats();
		newMOB.recoverMaxState();
		newMOB.resetToMaxState();
		newMOB.setStartRoom(null);
		// NOTE(review): text() presumably rebuilds the mob's persistence text — confirm
		newMOB.text();
		return(newMOB);
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.rest.handler.legacy.backpressure;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.runtime.akka.AkkaJobManagerGateway;
import org.apache.flink.runtime.akka.AkkaUtils;
import org.apache.flink.runtime.client.JobClient;
import org.apache.flink.runtime.execution.Environment;
import org.apache.flink.runtime.executiongraph.ExecutionGraph;
import org.apache.flink.runtime.executiongraph.ExecutionJobVertex;
import org.apache.flink.runtime.highavailability.HighAvailabilityServices;
import org.apache.flink.runtime.highavailability.HighAvailabilityServicesUtils;
import org.apache.flink.runtime.instance.ActorGateway;
import org.apache.flink.runtime.instance.AkkaActorGateway;
import org.apache.flink.runtime.io.network.buffer.Buffer;
import org.apache.flink.runtime.io.network.buffer.BufferBuilder;
import org.apache.flink.runtime.io.network.buffer.BufferBuilderTestUtils;
import org.apache.flink.runtime.io.network.buffer.BufferPool;
import org.apache.flink.runtime.io.network.buffer.NetworkBufferPool;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
import org.apache.flink.runtime.messages.JobManagerMessages;
import org.apache.flink.runtime.testingUtils.TestingJobManagerMessages;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.apache.flink.util.TestLogger;
import akka.actor.ActorSystem;
import akka.testkit.JavaTestKit;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import scala.concurrent.duration.FiniteDuration;
import static org.apache.flink.runtime.testingUtils.TestingJobManagerMessages.AllVerticesRunning;
import static org.apache.flink.runtime.testingUtils.TestingJobManagerMessages.ExecutionGraphFound;
import static org.apache.flink.runtime.testingUtils.TestingJobManagerMessages.RequestExecutionGraph;
import static org.apache.flink.runtime.testingUtils.TestingJobManagerMessages.WaitForAllVerticesToBeRunning;
/**
* Simple back pressured task test.
*/
/**
 * Simple back pressured task test.
 *
 * <p>Integration test for {@link BackPressureStatsTrackerImpl}: a job is run whose tasks
 * block requesting buffers from a shared, intentionally exhausted {@link BufferPool}. The
 * tracker must then report full back pressure; after the buffers are released, the ratios
 * must drop to zero, and sampling an archived (cancelled) job must not trigger.
 */
public class BackPressureStatsTrackerImplITCase extends TestLogger {
	private static NetworkBufferPool networkBufferPool;
	private static ActorSystem testActorSystem;
	/** Shared as static variable with the test task. */
	private static BufferPool testBufferPool;
	@BeforeClass
	public static void setup() {
		testActorSystem = AkkaUtils.createLocalActorSystem(new Configuration());
		// 100 segments of 8K each; the test drains all of them to fake back pressure
		networkBufferPool = new NetworkBufferPool(100, 8192);
	}
	@AfterClass
	public static void teardown() {
		JavaTestKit.shutdownActorSystem(testActorSystem);
		networkBufferPool.destroyAllBufferPools();
		networkBufferPool.destroy();
	}
	/**
	 * Tests a simple fake-back pressured task. Back pressure is assumed when
	 * sampled stack traces are in blocking buffer requests.
	 */
	@Test
	public void testBackPressuredProducer() throws Exception {
		new JavaTestKit(testActorSystem) {{
			final FiniteDuration deadline = new FiniteDuration(60, TimeUnit.SECONDS);
			// The JobGraph: a single vertex running 4 parallel BackPressuredTask instances
			final JobGraph jobGraph = new JobGraph();
			final int parallelism = 4;
			final JobVertex task = new JobVertex("Task");
			task.setInvokableClass(BackPressuredTask.class);
			task.setParallelism(parallelism);
			jobGraph.addVertex(task);
			final Configuration config = new Configuration();
			final HighAvailabilityServices highAvailabilityServices = HighAvailabilityServicesUtils.createAvailableOrEmbeddedServices(
				config,
				TestingUtils.defaultExecutor());
			ActorGateway jobManger = null;
			ActorGateway taskManager = null;
			//
			// 1) Consume all buffers at first (no buffers for the test task)
			//
			testBufferPool = networkBufferPool.createBufferPool(1, Integer.MAX_VALUE);
			final List<Buffer> buffers = new ArrayList<>();
			// drain the pool completely so the tasks block in requestBufferBuilderBlocking()
			while (true) {
				Buffer buffer = testBufferPool.requestBuffer();
				if (buffer != null) {
					buffers.add(buffer);
				} else {
					break;
				}
			}
			try {
				jobManger = TestingUtils.createJobManager(
					testActorSystem,
					TestingUtils.defaultExecutor(),
					TestingUtils.defaultExecutor(),
					config,
					highAvailabilityServices);
				config.setInteger(TaskManagerOptions.NUM_TASK_SLOTS, parallelism);
				taskManager = TestingUtils.createTaskManager(
					testActorSystem,
					highAvailabilityServices,
					config,
					true,
					true);
				final ActorGateway jm = jobManger;
				new Within(deadline) {
					@Override
					protected void run() {
						try {
							ActorGateway testActor = new AkkaActorGateway(getTestActor(), HighAvailabilityServices.DEFAULT_LEADER_ID);
							// Submit the job and wait until it is running
							JobClient.submitJobDetached(
								new AkkaJobManagerGateway(jm),
								config,
								jobGraph,
								Time.milliseconds(deadline.toMillis()),
								ClassLoader.getSystemClassLoader());
							jm.tell(new WaitForAllVerticesToBeRunning(jobGraph.getJobID()), testActor);
							expectMsgEquals(new AllVerticesRunning(jobGraph.getJobID()));
							// Get the ExecutionGraph
							jm.tell(new RequestExecutionGraph(jobGraph.getJobID()), testActor);
							ExecutionGraphFound executionGraphResponse =
								expectMsgClass(ExecutionGraphFound.class);
							ExecutionGraph executionGraph = (ExecutionGraph) executionGraphResponse.executionGraph();
							ExecutionJobVertex vertex = executionGraph.getJobVertex(task.getID());
							StackTraceSampleCoordinator coordinator = new StackTraceSampleCoordinator(
								testActorSystem.dispatcher(), 60000);
							// Verify back pressure (clean up interval can be ignored)
							// Tracker config: 100s cleanup interval, 20 samples, 10ms delay
							BackPressureStatsTrackerImpl statsTracker = new BackPressureStatsTrackerImpl(
								coordinator,
								100 * 1000,
								20,
								Integer.MAX_VALUE,
								Time.milliseconds(10L));
							int numAttempts = 10;
							int nextSampleId = 0;
							// Verify that all tasks are back pressured. This
							// can fail if the task takes longer to request
							// the buffer.
							for (int attempt = 0; attempt < numAttempts; attempt++) {
								try {
									OperatorBackPressureStats stats = triggerStatsSample(statsTracker, vertex);
									Assert.assertEquals(nextSampleId + attempt, stats.getSampleId());
									Assert.assertEquals(parallelism, stats.getNumberOfSubTasks());
									Assert.assertEquals(1.0, stats.getMaxBackPressureRatio(), 0.0);
									// every subtask must be fully back pressured (ratio 1.0)
									for (int i = 0; i < parallelism; i++) {
										Assert.assertEquals(1.0, stats.getBackPressureRatio(i), 0.0);
									}
									nextSampleId = stats.getSampleId() + 1;
									break;
								} catch (Throwable t) {
									if (attempt == numAttempts - 1) {
										throw t;
									} else {
										Thread.sleep(500);
									}
								}
							}
							//
							// 2) Release all buffers and let the tasks grab one
							//
							for (Buffer buf : buffers) {
								buf.recycleBuffer();
								Assert.assertTrue(buf.isRecycled());
							}
							// Wait for all buffers to be available. The tasks
							// grab them and then immediately release them.
							while (testBufferPool.getNumberOfAvailableMemorySegments() < 100) {
								Thread.sleep(100);
							}
							// Verify that no task is back pressured any more.
							for (int attempt = 0; attempt < numAttempts; attempt++) {
								try {
									OperatorBackPressureStats stats = triggerStatsSample(statsTracker, vertex);
									Assert.assertEquals(nextSampleId + attempt, stats.getSampleId());
									Assert.assertEquals(parallelism, stats.getNumberOfSubTasks());
									// Verify that no task is back pressured
									for (int i = 0; i < parallelism; i++) {
										Assert.assertEquals(0.0, stats.getBackPressureRatio(i), 0.0);
									}
									break;
								} catch (Throwable t) {
									if (attempt == numAttempts - 1) {
										throw t;
									} else {
										Thread.sleep(500);
									}
								}
							}
							// Shut down
							jm.tell(new TestingJobManagerMessages.NotifyWhenJobRemoved(jobGraph.getJobID()), testActor);
							// Cancel job
							jm.tell(new JobManagerMessages.CancelJob(jobGraph.getJobID()));
							// Response to removal notification
							expectMsgEquals(true);
							//
							// 3) Trigger stats for archived job
							//
							statsTracker.invalidateOperatorStatsCache();
							// sampling must refuse to trigger once the job is gone
							Assert.assertFalse("Unexpected trigger", statsTracker.triggerStackTraceSample(vertex));
						} catch (Exception e) {
							e.printStackTrace();
							Assert.fail(e.getMessage());
						}
					}
				};
			} finally {
				TestingUtils.stopActor(jobManger);
				TestingUtils.stopActor(taskManager);
				highAvailabilityServices.closeAndCleanupAllData();
				testBufferPool.lazyDestroy();
			}
		}};
	}
	/**
	 * Triggers a new stats sample and blocks until the tracker has a result.
	 *
	 * @param statsTracker tracker to trigger the sample on
	 * @param vertex vertex to sample
	 * @return the operator back pressure stats once available
	 * @throws InterruptedException if interrupted while waiting for the sample
	 */
	private OperatorBackPressureStats triggerStatsSample(
			BackPressureStatsTrackerImpl statsTracker,
			ExecutionJobVertex vertex) throws InterruptedException {
		statsTracker.invalidateOperatorStatsCache();
		Assert.assertTrue("Failed to trigger", statsTracker.triggerStackTraceSample(vertex));
		// Sleep minimum duration: 20 samples * 10 ms delay (see tracker construction above)
		Thread.sleep(20 * 10);
		Optional<OperatorBackPressureStats> stats;
		// Get the stats
		while (!(stats = statsTracker.getOperatorBackPressureStats(vertex)).isPresent()) {
			Thread.sleep(10);
		}
		return stats.get();
	}
	/**
	 * A back pressured producer sharing a {@link BufferPool} with the
	 * test driver.
	 */
	public static class BackPressuredTask extends AbstractInvokable {
		public BackPressuredTask(Environment environment) {
			super(environment);
		}
		@Override
		public void invoke() throws Exception {
			while (true) {
				// blocks while the driver holds all buffers -> sampled as back pressure
				final BufferBuilder bufferBuilder = testBufferPool.requestBufferBuilderBlocking();
				// Got a buffer, yay!
				BufferBuilderTestUtils.buildSingleBuffer(bufferBuilder).recycleBuffer();
				// park until interrupted (latch never counts down) so later samples
				// no longer show a blocking buffer request
				new CountDownLatch(1).await();
			}
		}
	}
}
| |
/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.facebook.react.modules.core;
import javax.annotation.Nullable;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import android.util.SparseArray;
import android.view.Choreographer;
import com.facebook.infer.annotation.Assertions;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ExecutorToken;
import com.facebook.react.bridge.LifecycleEventListener;
import com.facebook.react.bridge.OnExecutorUnregisteredListener;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.common.SystemClock;
import com.facebook.react.uimanager.ReactChoreographer;
/**
* Native module for JS timer execution. Timers fire on frame boundaries.
*/
public final class Timing extends ReactContextBaseJavaModule implements LifecycleEventListener,
  OnExecutorUnregisteredListener {

  /**
   * A single JS-created timer. Ordered by {@link #mTargetTime} inside {@link #mTimers}.
   */
  private static class Timer {

    private final ExecutorToken mExecutorToken;
    private final int mCallbackID;
    private final boolean mRepeat;
    private final int mInterval;
    private long mTargetTime;

    private Timer(
        ExecutorToken executorToken,
        int callbackID,
        long initialTargetTime,
        int duration,
        boolean repeat) {
      mExecutorToken = executorToken;
      mCallbackID = callbackID;
      mTargetTime = initialTargetTime;
      mInterval = duration;
      mRepeat = repeat;
    }
  }

  private class FrameCallback implements Choreographer.FrameCallback {

    // Temporary map for constructing the individual arrays of timers per ExecutorToken
    private final HashMap<ExecutorToken, WritableArray> mTimersToCall = new HashMap<>();

    /**
     * Calls all timers that have expired since the last time this frame callback was called.
     */
    @Override
    public void doFrame(long frameTimeNanos) {
      if (isPaused.get()) {
        return;
      }

      long frameTimeMillis = frameTimeNanos / 1000000;
      synchronized (mTimerGuard) {
        while (!mTimers.isEmpty() && mTimers.peek().mTargetTime < frameTimeMillis) {
          Timer timer = mTimers.poll();
          WritableArray timersForContext = mTimersToCall.get(timer.mExecutorToken);
          if (timersForContext == null) {
            timersForContext = Arguments.createArray();
            mTimersToCall.put(timer.mExecutorToken, timersForContext);
          }
          timersForContext.pushInt(timer.mCallbackID);
          if (timer.mRepeat) {
            // Repeating timer: reschedule relative to this frame.
            timer.mTargetTime = frameTimeMillis + timer.mInterval;
            mTimers.add(timer);
          } else {
            // One-shot timer has fired: drop it from the per-token id table too.
            // BUG FIX: this previously called mTimerIdsToTimers.remove(timer.mCallbackID),
            // which is a silent no-op (that map is keyed by ExecutorToken, not by id),
            // leaking the Timer entry for every fired one-shot timer.
            SparseArray<Timer> timers = mTimerIdsToTimers.get(timer.mExecutorToken);
            if (timers != null) {
              timers.remove(timer.mCallbackID);
            }
          }
        }
      }

      // Dispatch outside the lock: one callTimers batch per ExecutorToken.
      for (Map.Entry<ExecutorToken, WritableArray> entry : mTimersToCall.entrySet()) {
        getReactApplicationContext().getJSModule(entry.getKey(), JSTimersExecution.class)
            .callTimers(entry.getValue());
      }
      mTimersToCall.clear();

      // Re-arm for the next frame.
      Assertions.assertNotNull(mReactChoreographer)
          .postFrameCallback(ReactChoreographer.CallbackType.TIMERS_EVENTS, this);
    }
  }

  // Guards mTimers and mTimerIdsToTimers (mutated from JS and UI threads).
  private final Object mTimerGuard = new Object();
  // Pending timers, ordered by target fire time.
  private final PriorityQueue<Timer> mTimers;
  // Lookup table: ExecutorToken -> (callbackID -> Timer), used for deletion.
  private final HashMap<ExecutorToken, SparseArray<Timer>> mTimerIdsToTimers;
  private final AtomicBoolean isPaused = new AtomicBoolean(true);
  private final FrameCallback mFrameCallback = new FrameCallback();
  private @Nullable ReactChoreographer mReactChoreographer;
  private boolean mFrameCallbackPosted = false;

  public Timing(ReactApplicationContext reactContext) {
    super(reactContext);
    // We store timers sorted by finish time.
    mTimers = new PriorityQueue<Timer>(
        11, // Default capacity: for some reason they don't expose a (Comparator) constructor
        new Comparator<Timer>() {
          @Override
          public int compare(Timer lhs, Timer rhs) {
            long diff = lhs.mTargetTime - rhs.mTargetTime;
            if (diff == 0) {
              return 0;
            } else if (diff < 0) {
              return -1;
            } else {
              return 1;
            }
          }
        });
    mTimerIdsToTimers = new HashMap<>();
  }

  @Override
  public void initialize() {
    // Safe to acquire choreographer here, as initialize() is invoked from UI thread.
    mReactChoreographer = ReactChoreographer.getInstance();
    getReactApplicationContext().addLifecycleEventListener(this);
  }

  @Override
  public void onHostPause() {
    isPaused.set(true);
    clearChoreographerCallback();
  }

  @Override
  public void onHostDestroy() {
    clearChoreographerCallback();
  }

  @Override
  public void onHostResume() {
    isPaused.set(false);
    // TODO(5195192) Investigate possible problems related to restarting all tasks at the same
    // moment
    setChoreographerCallback();
  }

  @Override
  public void onCatalystInstanceDestroy() {
    clearChoreographerCallback();
  }

  /** Posts the frame callback exactly once; no-op if it is already pending. */
  private void setChoreographerCallback() {
    if (!mFrameCallbackPosted) {
      Assertions.assertNotNull(mReactChoreographer).postFrameCallback(
          ReactChoreographer.CallbackType.TIMERS_EVENTS,
          mFrameCallback);
      mFrameCallbackPosted = true;
    }
  }

  /** Removes the pending frame callback; no-op if none is posted. */
  private void clearChoreographerCallback() {
    if (mFrameCallbackPosted) {
      Assertions.assertNotNull(mReactChoreographer).removeFrameCallback(
          ReactChoreographer.CallbackType.TIMERS_EVENTS,
          mFrameCallback);
      mFrameCallbackPosted = false;
    }
  }

  @Override
  public String getName() {
    return "RKTiming";
  }

  @Override
  public boolean supportsWebWorkers() {
    return true;
  }

  /**
   * Drops every timer registered under the destroyed executor, both from the
   * id table and from the firing queue.
   */
  @Override
  public void onExecutorDestroyed(ExecutorToken executorToken) {
    synchronized (mTimerGuard) {
      SparseArray<Timer> timersForContext = mTimerIdsToTimers.remove(executorToken);
      if (timersForContext == null) {
        return;
      }
      for (int i = 0; i < timersForContext.size(); i++) {
        Timer timer = timersForContext.get(timersForContext.keyAt(i));
        mTimers.remove(timer);
      }
    }
  }

  /**
   * Registers a timer created from JS.
   *
   * @param executorToken the JS executor owning the timer
   * @param callbackID id JS uses to match the timer callback
   * @param duration timer duration in ms
   * @param jsSchedulingTime JS timestamp (ms) at which the timer was scheduled
   * @param repeat whether the timer fires repeatedly every {@code duration} ms
   */
  @ReactMethod
  public void createTimer(
      ExecutorToken executorToken,
      final int callbackID,
      final int duration,
      final double jsSchedulingTime,
      final boolean repeat) {
    // Adjust for the amount of time it took for native to receive the timer registration call
    long adjustedDuration = (long) Math.max(
        0,
        jsSchedulingTime - SystemClock.currentTimeMillis() + duration);
    if (duration == 0 && !repeat) {
      // Zero-delay one-shot: fire immediately instead of queueing for the next frame.
      WritableArray timerToCall = Arguments.createArray();
      timerToCall.pushInt(callbackID);
      getReactApplicationContext().getJSModule(executorToken, JSTimersExecution.class)
          .callTimers(timerToCall);
      return;
    }

    long initialTargetTime = SystemClock.nanoTime() / 1000000 + adjustedDuration;
    Timer timer = new Timer(executorToken, callbackID, initialTargetTime, duration, repeat);
    synchronized (mTimerGuard) {
      mTimers.add(timer);
      SparseArray<Timer> timersForContext = mTimerIdsToTimers.get(executorToken);
      if (timersForContext == null) {
        timersForContext = new SparseArray<>();
        mTimerIdsToTimers.put(executorToken, timersForContext);
      }
      timersForContext.put(callbackID, timer);
    }
  }

  /**
   * Cancels a timer created from JS. Safe to call for timers that have already
   * fired or been removed.
   */
  @ReactMethod
  public void deleteTimer(ExecutorToken executorToken, int timerId) {
    synchronized (mTimerGuard) {
      SparseArray<Timer> timersForContext = mTimerIdsToTimers.get(executorToken);
      if (timersForContext == null) {
        return;
      }
      Timer timer = timersForContext.get(timerId);
      if (timer == null) {
        // We may have already called/removed it
        return;
      }
      // BUG FIX: this previously called mTimerIdsToTimers.remove(timerId), a silent no-op
      // since that map is keyed by ExecutorToken — the id mapping was never cleaned up.
      timersForContext.remove(timerId);
      mTimers.remove(timer);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.bundleplugin;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.regex.Pattern;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.plugin.MojoExecutionException;
import aQute.lib.osgi.Instruction;
import aQute.libg.header.OSGiHeader;
/**
* Apply clause-based filter over given dependencies
*
* @author <a href="mailto:dev@felix.apache.org">Felix Project Team</a>
*/
public abstract class AbstractDependencyFilter
{
    // Inserts a wildcard primary key ("*") in front of clauses that begin directly with an
    // attribute (e.g. "groupId=foo"), so OSGiHeader.parseHeader sees a uniform clause shape.
    private static final Pattern MISSING_KEY_PATTERN = Pattern.compile( "(^|,)\\p{Blank}*(!)?\\p{Blank}*([a-zA-Z]+=)" );
    /**
     * Dependency artifacts.
     */
    private final Collection m_dependencyArtifacts;
    public AbstractDependencyFilter( Collection dependencyArtifacts )
    {
        m_dependencyArtifacts = dependencyArtifacts;
    }
    /**
     * Matches one textual attribute of an Artifact against a single instruction pattern.
     * A null attribute value is matched against the supplied default value instead.
     */
    private static abstract class DependencyFilter
    {
        private final Instruction m_instruction;
        private final String m_defaultValue;
        public DependencyFilter( String expression )
        {
            this( expression, "" );
        }
        public DependencyFilter( String expression, String defaultValue )
        {
            m_instruction = Instruction.getPattern( expression );
            m_defaultValue = defaultValue;
        }
        // Removes from the collection every artifact that does not match this filter.
        public void filter( Collection dependencies )
        {
            for ( Iterator i = dependencies.iterator(); i.hasNext(); )
            {
                if ( false == matches( ( Artifact ) i.next() ) )
                {
                    i.remove();
                }
            }
        }
        // Subclasses choose which attribute of the artifact to test.
        abstract boolean matches( Artifact dependency );
        boolean matches( String text )
        {
            boolean result;
            if ( null == text )
            {
                result = m_instruction.matches( m_defaultValue );
            }
            else
            {
                result = m_instruction.matches( text );
            }
            // A "!" prefix on the instruction pattern inverts the match result.
            return m_instruction.isNegated() ? !result : result;
        }
    }
    /**
     * Parses the given clause-based header, and for each clause selects the matching
     * dependency artifacts and hands them to {@link #processDependencies}.
     * Clauses filter by main key (artifactId pattern) and/or attributes such as
     * groupId, artifactId, version, scope, type, classifier, optional and inline.
     * Negative clauses (leading "!") instead shrink the set of available artifacts;
     * a trailing negative clause implies an implicit "*" clause at the end.
     *
     * @throws MojoExecutionException on an unrecognized clause attribute
     */
    protected final void processInstructions( String header ) throws MojoExecutionException
    {
        Map instructions = OSGiHeader.parseHeader( MISSING_KEY_PATTERN.matcher( header ).replaceAll( "$1$2*;$3" ) );
        Collection availableDependencies = new LinkedHashSet( m_dependencyArtifacts );
        DependencyFilter filter;
        for ( Iterator clauseIterator = instructions.entrySet().iterator(); clauseIterator.hasNext(); )
        {
            String inline = "false";
            // always start with a fresh *modifiable* collection for each unique clause
            Collection filteredDependencies = new LinkedHashSet( availableDependencies );
            // CLAUSE: REGEXP --> { ATTRIBUTE MAP }
            Map.Entry clause = ( Map.Entry ) clauseIterator.next();
            // OSGiHeader de-duplicates repeated keys by appending '~'; strip that suffix.
            String primaryKey = ( ( String ) clause.getKey() ).replaceFirst( "~+$", "" );
            boolean isNegative = primaryKey.startsWith( "!" );
            if ( isNegative )
            {
                primaryKey = primaryKey.substring( 1 );
            }
            if ( !"*".equals( primaryKey ) )
            {
                filter = new DependencyFilter( primaryKey )
                {
                    boolean matches( Artifact dependency )
                    {
                        return super.matches( dependency.getArtifactId() );
                    }
                };
                // FILTER ON MAIN CLAUSE
                filter.filter( filteredDependencies );
            }
            for ( Iterator attrIterator = ( ( Map ) clause.getValue() ).entrySet().iterator(); attrIterator.hasNext(); )
            {
                // ATTRIBUTE: KEY --> REGEXP
                Map.Entry attr = ( Map.Entry ) attrIterator.next();
                if ( "groupId".equals( attr.getKey() ) )
                {
                    filter = new DependencyFilter( ( String ) attr.getValue() )
                    {
                        boolean matches( Artifact dependency )
                        {
                            return super.matches( dependency.getGroupId() );
                        }
                    };
                }
                else if ( "artifactId".equals( attr.getKey() ) )
                {
                    filter = new DependencyFilter( ( String ) attr.getValue() )
                    {
                        boolean matches( Artifact dependency )
                        {
                            return super.matches( dependency.getArtifactId() );
                        }
                    };
                }
                else if ( "version".equals( attr.getKey() ) )
                {
                    filter = new DependencyFilter( ( String ) attr.getValue() )
                    {
                        boolean matches( Artifact dependency )
                        {
                            try
                            {
                                // use the symbolic version if available (ie. 1.0.0-SNAPSHOT)
                                return super.matches( dependency.getSelectedVersion().toString() );
                            }
                            catch ( Exception e )
                            {
                                return super.matches( dependency.getVersion() );
                            }
                        }
                    };
                }
                else if ( "scope".equals( attr.getKey() ) )
                {
                    // a missing scope is treated as "compile"
                    filter = new DependencyFilter( ( String ) attr.getValue(), "compile" )
                    {
                        boolean matches( Artifact dependency )
                        {
                            return super.matches( dependency.getScope() );
                        }
                    };
                }
                else if ( "type".equals( attr.getKey() ) )
                {
                    // a missing type is treated as "jar"
                    filter = new DependencyFilter( ( String ) attr.getValue(), "jar" )
                    {
                        boolean matches( Artifact dependency )
                        {
                            return super.matches( dependency.getType() );
                        }
                    };
                }
                else if ( "classifier".equals( attr.getKey() ) )
                {
                    filter = new DependencyFilter( ( String ) attr.getValue() )
                    {
                        boolean matches( Artifact dependency )
                        {
                            return super.matches( dependency.getClassifier() );
                        }
                    };
                }
                else if ( "optional".equals( attr.getKey() ) )
                {
                    filter = new DependencyFilter( ( String ) attr.getValue(), "false" )
                    {
                        boolean matches( Artifact dependency )
                        {
                            return super.matches( "" + dependency.isOptional() );
                        }
                    };
                }
                else if ( "inline".equals( attr.getKey() ) )
                {
                    // "inline" is not a filter; it is forwarded to processDependencies below.
                    inline = ( String ) attr.getValue();
                    continue;
                }
                else
                {
                    throw new MojoExecutionException( "Unexpected attribute " + attr.getKey() );
                }
                // FILTER ON EACH ATTRIBUTE
                filter.filter( filteredDependencies );
            }
            if ( isNegative )
            {
                // negative clauses reduce the set of available artifacts
                availableDependencies.removeAll( filteredDependencies );
                if ( !clauseIterator.hasNext() )
                {
                    // assume there's an implicit * missing at the end
                    processDependencies( availableDependencies, inline );
                }
            }
            else
            {
                // positive clause; doesn't alter the available artifacts
                processDependencies( filteredDependencies, inline );
            }
        }
    }
    protected abstract void processDependencies( Collection dependencies, String inline );
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.spark.testing;
import com.facebook.airlift.log.Logger;
import com.facebook.presto.Session;
import com.facebook.presto.common.QualifiedObjectName;
import com.facebook.presto.hive.HiveHadoop2Plugin;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.spi.TableHandle;
import com.facebook.presto.spi.connector.ConnectorFactory;
import com.facebook.presto.testing.LocalQueryRunner;
import com.facebook.presto.testing.MaterializedResult;
import com.facebook.presto.tpch.TpchConnectorFactory;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.hash.Hashing;
import io.airlift.units.Duration;
import org.joda.time.DateTimeZone;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.TimeoutException;
import java.util.regex.Pattern;
import static com.facebook.presto.spark.testing.Processes.destroyProcess;
import static com.facebook.presto.testing.TestingSession.testSessionBuilder;
import static com.facebook.presto.tests.QueryAssertions.assertEqualsIgnoreOrder;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.io.Files.asByteSource;
import static com.google.common.io.Files.write;
import static com.google.common.io.MoreFiles.deleteRecursively;
import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
import static com.google.common.io.Resources.getResource;
import static com.google.common.io.Resources.toByteArray;
import static java.lang.String.format;
import static java.lang.Thread.sleep;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.nio.file.Files.createDirectories;
import static java.util.UUID.randomUUID;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.stream.Collectors.joining;
import static org.apache.hadoop.net.NetUtils.addStaticResolution;
import static org.testng.Assert.assertEquals;
/**
* IMPORTANT!
* <p>
* Before running this test from an IDE, the project must be built with maven.
* <p>
* Please run:
* <p>
* ./mvnw clean install -pl presto-spark-launcher,presto-spark-package -am -DskipTests
* <p>
* from the project root after making any changes to the presto-spark-* codebase,
* otherwise this test may be running an old code version
*/
@Test(singleThreaded = true)
public class TestPrestoSparkLauncherIntegrationSmokeTest
{
    private static final Logger log = Logger.get(TestPrestoSparkLauncherIntegrationSmokeTest.class);
    // Deliberately a non-UTC zone so timezone-handling bugs surface; must match the
    // -Duser.timezone JVM argument (verified at the top of setUp()).
    private static final DateTimeZone TIME_ZONE = DateTimeZone.forID("America/Bahia_Banderas");
    private File tempDir;
    private File sparkWorkDirectory;
    private DockerCompose dockerCompose;
    private Process composeProcess;
    private LocalQueryRunner localQueryRunner;
    private File prestoLauncher;
    private File prestoPackage;
    private File configProperties;
    private File catalogDirectory;
    /**
     * Brings up the docker-compose cluster (Spark master/workers + Hadoop), wires a local
     * Presto query runner to the dockerized Hive metastore, imports TPC-H test tables, and
     * resolves the pre-built Presto-on-Spark launcher jar and package tarball.
     */
    @BeforeClass
    public void setUp()
            throws Exception
    {
        assertEquals(DateTimeZone.getDefault(), TIME_ZONE, "Timezone not configured correctly. Add -Duser.timezone=America/Bahia_Banderas to your JVM arguments");
        // the default temporary directory location on MacOS is not sharable to docker
        tempDir = new File("/tmp", randomUUID().toString());
        createDirectories(tempDir.toPath());
        sparkWorkDirectory = new File(tempDir, "work");
        createDirectories(sparkWorkDirectory.toPath());
        File composeYaml = extractResource("docker-compose.yml", tempDir);
        dockerCompose = new DockerCompose(composeYaml);
        dockerCompose.verifyInstallation();
        dockerCompose.pull();
        composeProcess = dockerCompose.up(ImmutableMap.of(
                "spark-master", 1,
                "spark-worker", 2,
                "hadoop-master", 1));
        Session session = testSessionBuilder()
                .setCatalog("hive")
                .setSchema("default")
                .build();
        localQueryRunner = new LocalQueryRunner(session);
        HiveHadoop2Plugin plugin = new HiveHadoop2Plugin();
        ConnectorFactory hiveConnectorFactory = getOnlyElement(plugin.getConnectorFactories());
        addStaticResolution("hadoop-master", "127.0.0.1");
        String hadoopMasterAddress = dockerCompose.getContainerAddress("hadoop-master");
        // datanode is accessed via the internal docker IP address that is not accessible from the host
        addStaticResolution(hadoopMasterAddress, "127.0.0.1");
        localQueryRunner.createCatalog(
                "hive",
                hiveConnectorFactory,
                ImmutableMap.of(
                        "hive.metastore.uri", "thrift://127.0.0.1:9083",
                        "hive.time-zone", TIME_ZONE.getID(),
                        "hive.experimental-optimized-partition-update-serialization-enabled", "true"));
        localQueryRunner.createCatalog("tpch", new TpchConnectorFactory(), ImmutableMap.of());
        // it may take some time for the docker container to start
        ensureHiveIsRunning(localQueryRunner, new Duration(10, MINUTES));
        importTables(localQueryRunner, "lineitem", "orders");
        importTablesBucketed(localQueryRunner, ImmutableList.of("orderkey"), "lineitem", "orders");
        File projectRoot = resolveProjectRoot();
        prestoLauncher = resolveFile(new File(projectRoot, "presto-spark-launcher/target"), Pattern.compile("presto-spark-launcher-[\\d\\.]+(-SNAPSHOT)?\\.jar"));
        logPackageInfo(prestoLauncher);
        prestoPackage = resolveFile(new File(projectRoot, "presto-spark-package/target"), Pattern.compile("presto-spark-package-.+\\.tar\\.gz"));
        logPackageInfo(prestoPackage);
        configProperties = new File(tempDir, "config.properties");
        storeProperties(configProperties, ImmutableMap.of(
                "query.hash-partition-count", "10"));
        catalogDirectory = new File(tempDir, "catalogs");
        createDirectories(catalogDirectory.toPath());
        storeProperties(new File(catalogDirectory, "hive.properties"), ImmutableMap.of(
                "connector.name", "hive-hadoop2",
                "hive.metastore.uri", "thrift://hadoop-master:9083",
                // hadoop native cannot be run within the spark docker container
                // the getnetgrent dependency is missing
                "hive.dfs.require-hadoop-native", "false",
                "hive.time-zone", TIME_ZONE.getID()));
        storeProperties(new File(catalogDirectory, "tpch.properties"), ImmutableMap.of(
                "connector.name", "tpch",
                "tpch.splits-per-node", "4",
                "tpch.partitioning-enabled", "false"));
    }
    // Polls the metastore (by attempting a CTAS) until it responds or the timeout elapses.
    private static void ensureHiveIsRunning(LocalQueryRunner localQueryRunner, Duration timeout)
            throws InterruptedException, TimeoutException
    {
        long deadline = System.currentTimeMillis() + timeout.toMillis();
        while (System.currentTimeMillis() < deadline) {
            if (tryCreateDummyTable(localQueryRunner)) {
                return;
            }
            sleep(1000);
        }
        throw new TimeoutException(format("Not able to create a dummy table in hive after %s, most likely the hive docker service is down", timeout));
    }
    // Best-effort probe; failures are expected while the hive container is still starting.
    private static boolean tryCreateDummyTable(LocalQueryRunner localQueryRunner)
    {
        try {
            localQueryRunner.execute("CREATE TABLE dummy_nation AS SELECT * FROM tpch.tiny.nation");
            return true;
        }
        catch (RuntimeException e) {
            String message = format("Failed to create dummy table: %s", e.getMessage());
            if (log.isDebugEnabled()) {
                log.debug(message, e);
            }
            else {
                log.info(message);
            }
            return false;
        }
    }
    // Copies the named tpch.tiny tables into the hive catalog as unbucketed tables.
    private static void importTables(LocalQueryRunner localQueryRunner, String... tables)
    {
        for (String table : tables) {
            localQueryRunner.execute(format("CREATE TABLE %s AS SELECT * FROM tpch.tiny.%s", table, table));
        }
    }
    // Copies the named tpch.tiny tables as "<name>_bucketed" tables, bucketed on the given columns.
    private static void importTablesBucketed(LocalQueryRunner localQueryRunner, List<String> bucketedBy, String... tables)
    {
        for (String table : tables) {
            localQueryRunner.execute(format(
                    "CREATE TABLE %s_bucketed WITH (bucketed_by=array[%s], bucket_count=11) AS SELECT * FROM tpch.tiny.%s",
                    table,
                    bucketedBy.stream()
                            .map(value -> "'" + value + "'")
                            .collect(joining(",")),
                    table));
        }
    }
    /**
     * Spark has to deploy Presto on Spark package to every worker for every query.
     * Unfortunately Spark doesn't try to eagerly delete application data from the workers, and after running
     * a couple of queries the disk space utilization spikes.
     * While this might not be an issue when testing locally the disk space is usually very limited on CI environments.
     * To avoid issues when running on a CI environment we have to drop temporary application data eagerly after each test.
     */
    @AfterMethod(alwaysRun = true)
    public void cleanupSparkWorkDirectory()
            throws Exception
    {
        if (sparkWorkDirectory != null) {
            // Docker containers are run with a different user id. Run "rm" in a container to avoid permission related issues.
            int exitCode = dockerCompose.run(
                    "-v", format("%s:/spark/work", sparkWorkDirectory.getAbsolutePath()),
                    "spark-submit",
                    "/bin/bash", "-c", "rm -rf /spark/work/*");
            assertEquals(exitCode, 0);
        }
    }
    // Tears down all external resources; each field is nulled after cleanup so a partially
    // failed setUp() does not cause secondary failures here.
    @AfterClass(alwaysRun = true)
    public void tearDown()
            throws Exception
    {
        if (composeProcess != null) {
            destroyProcess(composeProcess);
            composeProcess = null;
        }
        if (dockerCompose != null) {
            dockerCompose.down();
            dockerCompose = null;
        }
        if (localQueryRunner != null) {
            localQueryRunner.close();
            localQueryRunner = null;
        }
        if (tempDir != null) {
            deleteRecursively(tempDir.toPath(), ALLOW_INSECURE);
            tempDir = null;
        }
    }
    /**
     * Submits the given SQL to the dockerized Spark cluster through the Presto-on-Spark
     * launcher, mounting the launcher jar, package tarball, config and catalogs into the
     * spark-submit container. Asserts the launcher exits successfully.
     */
    private void executeOnSpark(String query)
            throws InterruptedException, IOException
    {
        File queryFile = new File(tempDir, randomUUID() + ".sql");
        write(query.getBytes(UTF_8), queryFile);
        int exitCode = dockerCompose.run(
                "-v", format("%s:/presto/launcher.jar", prestoLauncher.getAbsolutePath()),
                "-v", format("%s:/presto/package.tar.gz", prestoPackage.getAbsolutePath()),
                "-v", format("%s:/presto/query.sql", queryFile.getAbsolutePath()),
                "-v", format("%s:/presto/etc/config.properties", configProperties.getAbsolutePath()),
                "-v", format("%s:/presto/etc/catalogs", catalogDirectory.getAbsolutePath()),
                "spark-submit",
                "/spark/bin/spark-submit",
                "--executor-memory", "512m",
                "--executor-cores", "4",
                "--conf", "spark.task.cpus=4",
                "--master", "spark://spark-master:7077",
                "--class", "com.facebook.presto.spark.launcher.PrestoSparkLauncher",
                "/presto/launcher.jar",
                "--package", "/presto/package.tar.gz",
                "--config", "/presto/etc/config.properties",
                "--catalogs", "/presto/etc/catalogs",
                "--catalog", "hive",
                "--schema", "default",
                "--file", "/presto/query.sql");
        assertEquals(exitCode, 0);
    }
    // Copies a classpath resource into the destination directory, keeping its file name.
    private static File extractResource(String resource, File destinationDirectory)
    {
        File file = new File(destinationDirectory, Paths.get(resource).getFileName().toString());
        try (FileOutputStream outputStream = new FileOutputStream(file)) {
            outputStream.write(toByteArray(getResource(resource)));
        }
        catch (IOException e) {
            throw new UncheckedIOException(e);
        }
        return file;
    }
    // Walks up from the working directory until the directory containing "presto-spark-testing"
    // (the project root) is found.
    private static File resolveProjectRoot()
    {
        File directory = new File(System.getProperty("user.dir"));
        while (true) {
            File prestoSparkTestingDirectory = new File(directory, "presto-spark-testing");
            if (prestoSparkTestingDirectory.exists() && prestoSparkTestingDirectory.isDirectory()) {
                return directory;
            }
            directory = directory.getParentFile();
            if (directory == null) {
                throw new IllegalStateException("working directory must be set to a directory within the presto project");
            }
        }
    }
    /**
     * Returns the single file in {@code directory} whose name matches {@code pattern}.
     *
     * @throws FileNotFoundException when zero or multiple files match
     */
    private static File resolveFile(File directory, Pattern pattern)
            throws FileNotFoundException
    {
        checkArgument(directory.exists() && directory.isDirectory(), "directory does not exist: %s", directory);
        List<File> result = new ArrayList<>();
        for (File file : directory.listFiles()) {
            if (pattern.matcher(file.getName()).matches()) {
                result.add(file);
            }
        }
        if (result.isEmpty()) {
            throw new FileNotFoundException(format("directory %s doesn't contain a file that matches the given pattern: %s", directory, pattern));
        }
        if (result.size() > 1) {
            throw new FileNotFoundException(format("directory %s contains multiple files that match the given pattern: %s", directory, pattern));
        }
        return getOnlyElement(result);
    }
    // Logs size/mtime/sha256 of a build artifact and warns when it looks stale, since this
    // test does not rebuild the launcher/package itself.
    private static void logPackageInfo(File file)
            throws IOException
    {
        long lastModified = file.lastModified();
        log.info(
                "%s size: %s modified: %s sha256sum: %s",
                file,
                file.length(),
                new Date(lastModified),
                asByteSource(file).hash(Hashing.sha256()).toString());
        long minutesSinceLastModified = (System.currentTimeMillis() - lastModified) / 1000 / 60;
        if (minutesSinceLastModified > 30) {
            log.warn("%s was modified more than 30 minutes ago. " +
                    "This test doesn't trigger automatic build. " +
                    "After any changes are applied - the project must be completely rebuilt for the changes to take effect.", file);
        }
    }
    // Writes the given map as a java.util.Properties file.
    private static void storeProperties(File file, Map<String, String> properties)
            throws IOException
    {
        Properties p = new Properties();
        p.putAll(properties);
        try (OutputStream outputStream = new FileOutputStream(file)) {
            p.store(outputStream, "");
        }
    }
    @Test
    public void testAggregation()
            throws Exception
    {
        assertQuery("" +
                "SELECT partkey, count(*) c " +
                "FROM lineitem " +
                "WHERE partkey % 10 = 1 " +
                "GROUP BY partkey " +
                "HAVING count(*) = 42");
    }
    @Test
    public void testBucketedAggregation()
            throws Exception
    {
        assertQuery("" +
                "SELECT orderkey, count(*) c " +
                "FROM lineitem_bucketed " +
                "WHERE partkey % 10 = 1 " +
                "GROUP BY orderkey");
    }
    @Test
    public void testJoin()
            throws Exception
    {
        assertQuery("" +
                "SELECT l.orderkey, l.linenumber, o.orderstatus " +
                "FROM lineitem l " +
                "JOIN orders o " +
                "ON l.orderkey = o.orderkey " +
                "WHERE l.orderkey % 223 = 42 AND l.linenumber = 4 and o.orderstatus = 'O'");
    }
    @Test
    public void testBucketedJoin()
            throws Exception
    {
        assertQuery("" +
                "SELECT l.orderkey, l.linenumber, o.orderstatus " +
                "FROM lineitem_bucketed l " +
                "JOIN orders_bucketed o " +
                "ON l.orderkey = o.orderkey " +
                "WHERE l.orderkey % 223 = 42 AND l.linenumber = 4 and o.orderstatus = 'O'");
    }
    @Test
    public void testCrossJoin()
            throws Exception
    {
        assertQuery("" +
                "SELECT o.custkey, l.orderkey " +
                "FROM (SELECT * FROM lineitem WHERE linenumber = 4) l " +
                "CROSS JOIN (SELECT * FROM orders WHERE orderkey = 5) o");
    }
    @Test
    public void testNWayJoin()
            throws Exception
    {
        assertQuery("SELECT " +
                " l.orderkey, " +
                " l.linenumber, " +
                " o1.orderstatus as orderstatus1, " +
                " o2.orderstatus as orderstatus2, " +
                " o3.orderstatus as orderstatus3, " +
                " o4.orderstatus as orderstatus4, " +
                " o5.orderstatus as orderstatus5, " +
                " o6.orderstatus as orderstatus6 " +
                "FROM lineitem l, orders o1, orders o2, orders o3, orders o4, orders o5, orders o6 " +
                "WHERE l.orderkey = o1.orderkey " +
                "AND l.orderkey = o2.orderkey " +
                "AND l.orderkey = o3.orderkey " +
                "AND l.orderkey = o4.orderkey " +
                "AND l.orderkey = o5.orderkey " +
                "AND l.orderkey = o6.orderkey");
    }
    @Test
    public void testUnionAll()
            throws Exception
    {
        assertQuery("SELECT * FROM orders UNION ALL SELECT * FROM orders");
    }
    @Test
    public void testValues()
            throws Exception
    {
        assertQuery("SELECT a, b " +
                "FROM (VALUES (1, 'a'), (2, 'b'), (3, 'c'), (4, 'd')) t1 (a, b) ");
    }
    @Test
    public void testUnionWithAggregationAndJoin()
            throws Exception
    {
        assertQuery(
                "SELECT t.orderkey, t.c, o.orderstatus FROM ( " +
                        "SELECT orderkey, count(*) as c FROM (" +
                        " SELECT orderdate ds, orderkey FROM orders " +
                        " UNION ALL " +
                        " SELECT shipdate ds, orderkey FROM lineitem) a " +
                        "GROUP BY orderkey) t " +
                        "JOIN orders o " +
                        "ON (o.orderkey = t.orderkey)");
    }
    @Test
    public void testBucketedTableWrite()
            throws Exception
    {
        executeOnSpark("CREATE TABLE test_orders_bucketed " +
                "WITH (bucketed_by=array['orderkey'], bucket_count=11) " +
                "AS SELECT * FROM orders_bucketed");
        MaterializedResult actual = localQueryRunner.execute("SELECT * FROM test_orders_bucketed");
        MaterializedResult expected = localQueryRunner.execute("SELECT * FROM orders_bucketed");
        assertEqualsIgnoreOrder(actual, expected);
        dropTable("test_orders_bucketed");
    }
    /**
     * Runs the query as a CTAS on Spark, then compares the written table's contents
     * against the same query executed locally (row order ignored).
     */
    private void assertQuery(String query)
            throws Exception
    {
        String tableName = "__tmp_" + randomUUID().toString().replaceAll("-", "_");
        executeOnSpark(format("CREATE TABLE %s AS %s", tableName, query));
        MaterializedResult actual = localQueryRunner.execute(format("SELECT * FROM %s", tableName));
        MaterializedResult expected = localQueryRunner.execute(query);
        assertEqualsIgnoreOrder(actual, expected);
        dropTable(tableName);
    }
    private void dropTable(String table)
    {
        // LocalQueryRunner doesn't support DROP TABLE
        localQueryRunner.inTransaction(localQueryRunner.getDefaultSession(), transactionSession -> {
            Metadata metadata = localQueryRunner.getMetadata();
            TableHandle tableHandle = metadata.getTableHandle(transactionSession, new QualifiedObjectName("hive", "default", table)).get();
            metadata.dropTable(transactionSession, tableHandle);
            return null;
        });
    }
}
| |
/*
* Copyright 2016 KairosDB Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kairosdb.datastore.cassandra;
import org.kairosdb.core.datapoints.LegacyDataPointFactory;
import org.kairosdb.util.StringPool;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.SortedMap;
public class DataPointsRowKeySerializer
{
	// UTF-8 is guaranteed present on every JVM; use the cached constant rather than a
	// Charset.forName() lookup. Value is identical to the previous Charset.forName("UTF-8").
	public static final Charset UTF8 = StandardCharsets.UTF_8;
	// Optional intern pool used to de-duplicate metric/tag strings across row keys; null
	// when pooling is disabled.
	private StringPool m_stringPool;
	public DataPointsRowKeySerializer()
	{
		this(false);
	}
	public DataPointsRowKeySerializer(boolean poolStrings)
	{
		if (poolStrings)
			m_stringPool = new StringPool();
	}
	/**
	 If we are pooling strings the string from the pool will be returned.
	 @param str string
	 @return returns the string or what's in the string pool if using a string pool
	 */
	private String getString(String str)
	{
		if (m_stringPool != null)
			return (m_stringPool.getString(str));
		else
			return (str);
	}
	/**
	 Serializes a row key to its binary wire format:
	 metric name, null byte, 8-byte timestamp, optional (null marker or 0xFF end-search
	 marker, datatype length byte, datatype bytes), then the escaped tag string.
	 The serialized form is cached on the row key; a duplicate buffer is returned so
	 callers can freely move position/limit.
	 */
	public ByteBuffer toByteBuffer(DataPointsRowKey dataPointsRowKey)
	{
		ByteBuffer buffer = dataPointsRowKey.getSerializedBuffer();
		if (buffer != null)
		{
			buffer = buffer.duplicate();
		}
		else
		{
			int size = 8; //size of timestamp
			byte[] metricName = dataPointsRowKey.getMetricName().getBytes(UTF8);
			size += metricName.length;
			size++; //Add one for null at end of string
			//if the data type is null then we are creating a row key for the old
			//format - this is for delete operations
			byte[] dataType = null;
			String dataTypeStr = dataPointsRowKey.getDataType();
			if (!dataTypeStr.equals(LegacyDataPointFactory.DATASTORE_TYPE))
			{
				dataType = dataPointsRowKey.getDataType().getBytes(UTF8);
				size += dataType.length;
				size += 2; //for null marker and datatype size
			}
			byte[] tagString = generateTagString(dataPointsRowKey.getTags()).getBytes(UTF8);
			size += tagString.length;
			buffer = ByteBuffer.allocate(size);
			buffer.put(metricName); //Metric name is put in this way for sorting purposes
			buffer.put((byte) 0x0);
			buffer.putLong(dataPointsRowKey.getTimestamp());
			if (dataType != null)
			{
				if (dataPointsRowKey.isEndSearchKey())
					buffer.put((byte) 0xFF); //Only used for serialization of end search keys
				else
					buffer.put((byte) 0x0); //Marks the beginning of datatype
				buffer.put((byte) dataType.length);
				buffer.put(dataType);
			}
			buffer.put(tagString);
			buffer.flip();
			dataPointsRowKey.setSerializedBuffer(buffer);
			buffer = buffer.duplicate();
		}
		return buffer;
	}
	// Appends value to sb, escaping ':' and '=' with the given escape character so the
	// tag string "key=value:" framing stays unambiguous.
	private StringBuilder escapeAppend(StringBuilder sb, String value, char escape)
	{
		int startPos = 0;
		for (int i = 0; i < value.length(); i++)
		{
			char ch = value.charAt(i);
			if (ch == ':' || ch == '=')
			{
				sb.append(value, startPos, i);
				sb.append(escape).append(ch);
				startPos = i + 1;
			}
		}
		if (startPos <= value.length())
		{
			sb.append(value, startPos, value.length());
		}
		return sb;
	}
	// Reverses escapeAppend() over source[start, end): drops each escape character and
	// keeps the character that follows it.
	private String unEscape(CharSequence source, int start, int end, char escape)
	{
		int startPos = start;
		StringBuilder sb = new StringBuilder(end - start);
		for (int i = start; i < end; i++)
		{
			char ch = source.charAt(i);
			if (ch == escape)
			{
				sb.append(source, startPos, i);
				i++; //Skip next char as it was escaped
				startPos = i;
			}
		}
		if (startPos <= end)
		{
			sb.append(source, startPos, end);
		}
		return sb.toString();
	}
	// Encodes the sorted tag map as "key=value:key=value:..." with keys escaped by ':'
	// and values escaped by '='.
	private String generateTagString(SortedMap<String, String> tags)
	{
		StringBuilder sb = new StringBuilder();
		for (String key : tags.keySet())
		{
			//Escape tag names using :
			escapeAppend(sb, key, ':').append("=");
			//Escape tag values using =
			escapeAppend(sb, tags.get(key), '=').append(":");
		}
		return (sb.toString());
	}
	// Parses the "key=value:" encoded tag string produced by generateTagString() and adds
	// each (possibly pooled) tag pair to the row key.
	private void extractTags(DataPointsRowKey rowKey, String tagString)
	{
		int mark = 0;
		int position = 0;
		String tag = null;
		String value;
		for (position = 0; position < tagString.length(); position ++)
		{
			if (tag == null)
			{
				if (tagString.charAt(position) == '=')
				{
					tag = unEscape(tagString, mark, position, ':');
					mark = position +1;
				}
				if (tagString.charAt(position) == ':')
				{
					position ++; //':' escapes the next char inside a key; skip it
				}
			}
			else
			{
				if (tagString.charAt(position) == ':')
				{
					value = unEscape(tagString, mark, position, '=');
					mark = position +1;
					rowKey.addTag(getString(tag), getString(value));
					tag = null;
				}
				if (tagString.charAt(position) == '=')
				{
					position ++; //'=' escapes the next char inside a value; skip it
				}
			}
		}
	}
	/**
	 Deserializes a row key from its binary wire format (see {@link #toByteBuffer}).
	 Keys written in the legacy format (no datatype section) get the legacy datastore type.
	 */
	public DataPointsRowKey fromByteBuffer(ByteBuffer byteBuffer, String clusterName)
	{
		int start = byteBuffer.position();
		byteBuffer.mark();
		//Find null
		while (byteBuffer.get() != 0x0);
		int nameSize = (byteBuffer.position() - start) -1;
		byteBuffer.reset();
		byte[] metricName = new byte[nameSize];
		byteBuffer.get(metricName);
		byteBuffer.get(); //Skip the null
		long timestamp = byteBuffer.getLong();
		//Check for datatype marker which is a null
		byteBuffer.mark();
		//default to legacy type
		String dataType = LegacyDataPointFactory.DATASTORE_TYPE;
		if (byteBuffer.get() == 0x0)
		{
			int dtSize = byteBuffer.get();
			byte[] dataTypeBytes = new byte[dtSize];
			byteBuffer.get(dataTypeBytes);
			dataType = new String(dataTypeBytes, UTF8);
		}
		else
		{
			byteBuffer.reset();
		}
		DataPointsRowKey rowKey = new DataPointsRowKey(getString(new String(metricName, UTF8)),
				clusterName,
				timestamp, dataType);
		byte[] tagString = new byte[byteBuffer.remaining()];
		byteBuffer.get(tagString);
		String tags = new String(tagString, UTF8);
		extractTags(rowKey, tags);
		return rowKey;
	}
}
| |
package org.drip.market.exchange;
/*
* -*- mode: java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*/
/*!
* Copyright (C) 2016 Lakshmi Krishnamurthy
*
* This file is part of DRIP, a free-software/open-source library for buy/side financial/trading model
* libraries targeting analysts and developers
* https://lakshmidrip.github.io/DRIP/
*
* DRIP is composed of four main libraries:
*
* - DRIP Fixed Income - https://lakshmidrip.github.io/DRIP-Fixed-Income/
* - DRIP Asset Allocation - https://lakshmidrip.github.io/DRIP-Asset-Allocation/
* - DRIP Numerical Optimizer - https://lakshmidrip.github.io/DRIP-Numerical-Optimizer/
* - DRIP Statistical Learning - https://lakshmidrip.github.io/DRIP-Statistical-Learning/
*
* - DRIP Fixed Income: Library for Instrument/Trading Conventions, Treasury Futures/Options,
* Funding/Forward/Overnight Curves, Multi-Curve Construction/Valuation, Collateral Valuation and XVA
* Metric Generation, Calibration and Hedge Attributions, Statistical Curve Construction, Bond RV
* Metrics, Stochastic Evolution and Option Pricing, Interest Rate Dynamics and Option Pricing, LMM
* Extensions/Calibrations/Greeks, Algorithmic Differentiation, and Asset Backed Models and Analytics.
*
* - DRIP Asset Allocation: Library for model libraries for MPT framework, Black Litterman Strategy
* Incorporator, Holdings Constraint, and Transaction Costs.
*
* - DRIP Numerical Optimizer: Library for Numerical Optimization and Spline Functionality.
*
* - DRIP Statistical Learning: Library for Statistical Evaluation and Machine Learning.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* TreasuryFuturesContractContainer holds the Details of some of the Common Treasury Futures Contracts.
*
* @author Lakshmi Krishnamurthy
*/
/**
 * TreasuryFuturesContractContainer holds the Details of some of the Common Treasury Futures Contracts.
 *
 * <p>Contracts are registered under two case-insensitive indexes: one keyed by each of the
 * contract's names/aliases, and one keyed by the composite "&lt;Code&gt;::&lt;Tenor&gt;" string.
 *
 * @author Lakshmi Krishnamurthy
 */
public class TreasuryFuturesContractContainer {

	/** Index from Futures Name/Alias (e.g., "RX1", "BUND") to the Contract. */
	private static final
		org.drip.analytics.support.CaseInsensitiveTreeMap<org.drip.market.exchange.TreasuryFuturesContract>
			_mapNameContract = new
				org.drip.analytics.support.CaseInsensitiveTreeMap<org.drip.market.exchange.TreasuryFuturesContract>();

	/** Index from "&lt;Code&gt;::&lt;Tenor&gt;" Key (e.g., "UST::10Y") to the Contract. */
	private static final
		org.drip.analytics.support.CaseInsensitiveTreeMap<org.drip.market.exchange.TreasuryFuturesContract>
			_mapCodeTenorContract = new
				org.drip.analytics.support.CaseInsensitiveTreeMap<org.drip.market.exchange.TreasuryFuturesContract>();

	/**
	 * Register a single Treasury Futures Contract under each of its Names and its Code/Tenor Key.
	 *
	 * @param astrName Array of Names/Aliases of the Contract
	 * @param strID The Contract ID
	 * @param strCode The Underlying Treasury Code
	 * @param strType The Contract Type
	 * @param strTenor The Futures Tenor
	 *
	 * @return TRUE - The Contract was successfully registered
	 */
	private static final boolean AddContract (
		final java.lang.String[] astrName,
		final java.lang.String strID,
		final java.lang.String strCode,
		final java.lang.String strType,
		final java.lang.String strTenor)
	{
		try {
			org.drip.market.exchange.TreasuryFuturesContract tfc = new
				org.drip.market.exchange.TreasuryFuturesContract (strID, strCode, strType, strTenor);

			// One entry per alias in the name index ...
			for (java.lang.String strName : astrName)
				_mapNameContract.put (strName, tfc);

			// ... and a single entry in the code/tenor index.
			_mapCodeTenorContract.put (strCode + "::" + strTenor, tfc);

			return true;
		} catch (java.lang.Exception e) {
			e.printStackTrace();
		}

		return false;
	}

	/**
	 * Initialize the Treasury Futures Contract Container with the Conventions
	 *
	 * @return TRUE - The Treasury Futures Contracts Container successfully initialized with the Contracts
	 */
	public static final boolean Init()
	{
		/*
		 * Australian Treasury Futures
		 */

		if (!AddContract (new java.lang.String[] {"YM1"}, "YM1", "AGB", "NOTE", "03Y")) return false;

		if (!AddContract (new java.lang.String[] {"XM1"}, "XM1", "AGB", "NOTE", "10Y")) return false;

		/*
		 * Canadian Treasury Futures
		 */

		if (!AddContract (new java.lang.String[] {"CN1"}, "CN1", "CAN", "NOTE", "10Y")) return false;

		/*
		 * Danish Treasury Futures
		 */

		if (!AddContract (new java.lang.String[] {"DGB"}, "DGB", "DGB", "NOTE", "10Y")) return false;

		/*
		 * French Treasury Futures
		 */

		if (!AddContract (new java.lang.String[] {"OAT1"}, "OAT1", "FRTR", "NOTE", "10Y")) return false;

		/*
		 * German Treasury Futures
		 */

		if (!AddContract (new java.lang.String[] {"DU1", "SCHATZ"}, "DU1", "DBR", "NOTE", "02Y"))
			return false;

		if (!AddContract (new java.lang.String[] {"OE1", "BOBL"}, "OE1", "DBR", "NOTE", "05Y")) return false;

		if (!AddContract (new java.lang.String[] {"RX1", "BUND"}, "RX1", "DBR", "NOTE", "10Y")) return false;

		if (!AddContract (new java.lang.String[] {"UB1", "BUXL"}, "UB1", "DBR", "NOTE", "30Y")) return false;

		/*
		 * Italian Treasury Futures
		 */

		if (!AddContract (new java.lang.String[] {"IK1"}, "IK1", "BTPS", "NOTE", "10Y")) return false;

		/*
		 * Japanese Treasury Futures
		 */

		if (!AddContract (new java.lang.String[] {"JB1"}, "JB1", "JGB", "NOTE", "10Y")) return false;

		/*
		 * Spanish Treasury Futures
		 */

		if (!AddContract (new java.lang.String[] {"FBB1"}, "FBB1", "SPGB", "NOTE", "10Y")) return false;

		/*
		 * Swiss Treasury Futures
		 */

		if (!AddContract (new java.lang.String[] {"GSWISS"}, "GSWISS", "GSWISS", "NOTE", "10Y"))
			return false;

		/*
		 * UK Treasury Futures
		 */

		if (!AddContract (new java.lang.String[] {"WB1"}, "WB1", "GILT", "NOTE", "02Y")) return false;

		if (!AddContract (new java.lang.String[] {"G1"}, "G1", "GILT", "NOTE", "10Y")) return false;

		/*
		 * US Treasury Futures
		 */

		if (!AddContract (new java.lang.String[] {"TU1"}, "TU1", "UST", "NOTE", "02Y")) return false;

		if (!AddContract (new java.lang.String[] {"FV1"}, "FV1", "UST", "NOTE", "05Y")) return false;

		if (!AddContract (new java.lang.String[] {"TY1"}, "TY1", "UST", "NOTE", "10Y")) return false;

		if (!AddContract (new java.lang.String[] {"US1"}, "US1", "UST", "NOTE", "20Y")) return false;

		if (!AddContract (new java.lang.String[] {"WN1", "ULTRA"}, "WN1", "UST", "NOTE", "30Y"))
			return false;

		return true;
	}

	/**
	 * Retrieve the Treasury Futures Contract by Name
	 *
	 * @param strTreasuryFuturesName The Treasury Futures Name
	 *
	 * @return The Treasury Futures Contract, or null if the Name is not registered
	 */
	public static final org.drip.market.exchange.TreasuryFuturesContract TreasuryFuturesContract (
		final java.lang.String strTreasuryFuturesName)
	{
		return !_mapNameContract.containsKey (strTreasuryFuturesName) ? null : _mapNameContract.get
			(strTreasuryFuturesName);
	}

	/**
	 * Retrieve the Treasury Futures Contract by Code and Tenor
	 *
	 * @param strCode The Treasury Code
	 * @param strTenor The Futures Tenor
	 *
	 * @return The Treasury Futures Contract, or null if the Code/Tenor pair is not registered
	 */
	public static final org.drip.market.exchange.TreasuryFuturesContract TreasuryFuturesContract (
		final java.lang.String strCode,
		final java.lang.String strTenor)
	{
		java.lang.String strCodeTenor = strCode + "::" + strTenor;

		// Fix: look up the code/tenor index. The original queried _mapNameContract with the
		// composite key, so this overload always returned null and _mapCodeTenorContract was
		// populated but never read.
		return !_mapCodeTenorContract.containsKey (strCodeTenor) ? null : _mapCodeTenorContract.get
			(strCodeTenor);
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.search;
import static com.google.common.truth.Truth.assertThat;
import static org.apache.zeppelin.search.LuceneSearch.formatId;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.List;
import java.util.Map;
import org.apache.zeppelin.conf.ZeppelinConfiguration;
import org.apache.zeppelin.interpreter.InterpreterFactory;
import org.apache.zeppelin.interpreter.InterpreterSetting;
import org.apache.zeppelin.interpreter.InterpreterSettingManager;
import org.apache.zeppelin.notebook.AuthorizationService;
import org.apache.zeppelin.notebook.Note;
import org.apache.zeppelin.notebook.NoteManager;
import org.apache.zeppelin.notebook.Notebook;
import org.apache.zeppelin.notebook.Paragraph;
import org.apache.zeppelin.notebook.repo.NotebookRepo;
import org.apache.zeppelin.user.AuthenticationInfo;
import org.apache.zeppelin.user.Credentials;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests for {@code LuceneSearch}: indexing, querying, re-indexing and deleting notes and
 * paragraphs through the notebook's search service.
 *
 * <p>Indexing is asynchronous (events are queued by the search service), so tests call
 * {@link #drainSearchEvents()} after mutating notes and before querying.
 */
public class LuceneSearchTest {
  // Real Notebook wired with mocks; saving/updating notes feeds the search index.
  private Notebook notebook;
  private InterpreterSettingManager interpreterSettingManager;
  // System under test.
  private LuceneSearch noteSearchService;
  // Temporary directory backing the Lucene index; removed in shutDown().
  private File indexDir;
  @Before
  public void startUp() throws IOException {
    indexDir = Files.createTempDirectory("lucene").toFile();
    // Point the search service at the temp index dir via the Zeppelin config system property.
    System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_SEARCH_INDEX_PATH.getVarName(), indexDir.getAbsolutePath());
    noteSearchService = new LuceneSearch(ZeppelinConfiguration.create());
    interpreterSettingManager = mock(InterpreterSettingManager.class);
    InterpreterSetting defaultInterpreterSetting = mock(InterpreterSetting.class);
    when(defaultInterpreterSetting.getName()).thenReturn("test");
    when(interpreterSettingManager.getDefaultInterpreterSetting()).thenReturn(defaultInterpreterSetting);
    // Everything except the interpreter-setting manager and the search service is mocked.
    notebook = new Notebook(ZeppelinConfiguration.create(), mock(AuthorizationService.class), mock(NotebookRepo.class), mock(NoteManager.class),
        mock(InterpreterFactory.class), interpreterSettingManager,
        noteSearchService,
        mock(Credentials.class), null);
  }
  @After
  public void shutDown() {
    noteSearchService.close();
    indexDir.delete();
  }
  // Busy-waits until the search service's event queue is empty; the trailing sleep allows the
  // last batch to be committed to the index. NOTE(review): timing-based — potentially flaky.
  private void drainSearchEvents() throws InterruptedException {
    while (!noteSearchService.isEventQueueEmpty()) {
      Thread.sleep(1000);
    }
    Thread.sleep(1000);
  }
  // Querying a term that appears in only one paragraph returns exactly that paragraph.
  @Test
  public void canIndexAndQuery() throws IOException, InterruptedException {
    // given
    Note note1 = newNoteWithParagraph("Notebook1", "test");
    Note note2 = newNoteWithParagraphs("Notebook2", "not test", "not test at all");
    drainSearchEvents();
    // when
    List<Map<String, String>> results = noteSearchService.query("all");
    // then
    assertThat(results).isNotEmpty();
    assertThat(results.size()).isEqualTo(1);
    assertThat(results.get(0))
        .containsEntry("id", formatId(note2.getId(), note2.getLastParagraph()));
  }
  // Note names themselves are searchable; the hit's id is the bare note id.
  @Test
  public void canIndexAndQueryByNotebookName() throws IOException, InterruptedException {
    // given
    Note note1 = newNoteWithParagraph("Notebook1", "test");
    Note note2 = newNoteWithParagraphs("Notebook2", "not test", "not test at all");
    drainSearchEvents();
    // when
    List<Map<String, String>> results = noteSearchService.query("Notebook1");
    // then
    assertThat(results).isNotEmpty();
    assertThat(results.size()).isEqualTo(1);
    assertThat(results.get(0)).containsEntry("id", note1.getId());
  }
  // Paragraph titles are indexed and surfaced in the result's "header" field.
  @Test
  public void canIndexAndQueryByParagraphTitle() throws IOException, InterruptedException {
    // given
    Note note1 = newNoteWithParagraph("Notebook1", "test", "testingTitleSearch");
    Note note2 = newNoteWithParagraph("Notebook2", "not test", "notTestingTitleSearch");
    drainSearchEvents();
    // when
    List<Map<String, String>> results = noteSearchService.query("testingTitleSearch");
    // then
    assertThat(results).isNotEmpty();
    assertThat(results.size()).isAtLeast(1);
    int TitleHits = 0;
    for (Map<String, String> res : results) {
      if (res.get("header").contains("testingTitleSearch")) {
        TitleHits++;
      }
    }
    assertThat(TitleHits).isAtLeast(1);
  }
  // Pins the structure of the index key so downstream consumers can rely on it.
  @Test
  public void indexKeyContract() throws IOException, InterruptedException {
    // given
    Note note1 = newNoteWithParagraph("Notebook1", "test");
    drainSearchEvents();
    // when
    String id = resultForQuery("test").get(0).get("id"); // LuceneSearch.ID_FIELD
    // then
    assertThat(id.split("/")).asList() // key structure <noteId>/paragraph/<paragraphId>
        .containsAllOf(
            note1.getId(), "paragraph", note1.getLastParagraph().getId()); // LuceneSearch.PARAGRAPH
  }
  // Querying an empty index must return an empty result, not throw.
  @Test // (expected=IllegalStateException.class)
  public void canNotSearchBeforeIndexing() {
    // given NO noteSearchService.index() was called
    // when
    List<Map<String, String>> result = noteSearchService.query("anything");
    // then
    assertThat(result).isEmpty();
    // assert logs were printed
    // "ERROR org.apache.zeppelin.search.SearchService:97 - Failed to open index dir RAMDirectory"
  }
  // Updating a paragraph replaces its previous index entry rather than adding a duplicate.
  @Test
  public void canIndexAndReIndex() throws IOException, InterruptedException {
    // given
    Note note1 = newNoteWithParagraph("Notebook1", "test");
    Note note2 = newNoteWithParagraphs("Notebook2", "not test", "not test at all");
    drainSearchEvents();
    // when
    Paragraph p2 = note2.getLastParagraph();
    p2.setText("test indeed");
    noteSearchService.updateNoteIndex(note2);
    noteSearchService.updateParagraphIndex(p2);
    // then
    List<Map<String, String>> results = noteSearchService.query("all");
    assertThat(results).isEmpty();
    results = noteSearchService.query("indeed");
    assertThat(results).isNotEmpty();
  }
  // Deleting a null note must be a no-op (the web UI can issue a double delete).
  @Test
  public void canDeleteNull() throws IOException {
    // give
    // looks like a bug in web UI: it tries to delete a note twice (after it has just been deleted)
    // when
    noteSearchService.deleteNoteIndex(null);
  }
  // Deleting a note removes the note and all of its paragraphs from the index.
  @Test
  public void canDeleteFromIndex() throws IOException, InterruptedException {
    // given
    Note note1 = newNoteWithParagraph("Notebook1", "test");
    Note note2 = newNoteWithParagraphs("Notebook2", "not test", "not test at all");
    drainSearchEvents();
    assertThat(resultForQuery("Notebook2")).isNotEmpty();
    // when
    noteSearchService.deleteNoteIndex(note2);
    // then
    assertThat(noteSearchService.query("all")).isEmpty();
    assertThat(resultForQuery("Notebook2")).isEmpty();
    List<Map<String, String>> results = resultForQuery("test");
    assertThat(results).isNotEmpty();
    assertThat(results.size()).isEqualTo(1);
  }
  // Saving a note after editing a paragraph refreshes the paragraph's index entry.
  @Test
  public void indexParagraphUpdatedOnNoteSave() throws IOException, InterruptedException {
    // given: total 2 notebooks, 3 paragraphs
    Note note1 = newNoteWithParagraph("Notebook1", "test");
    Note note2 = newNoteWithParagraphs("Notebook2", "not test", "not test at all");
    drainSearchEvents();
    assertThat(resultForQuery("test").size()).isEqualTo(3);
    // when
    Paragraph p1 = note1.getLastParagraph();
    p1.setText("no no no");
    notebook.saveNote(note1, AuthenticationInfo.ANONYMOUS);
    p1.getNote().fireParagraphUpdateEvent(p1);
    drainSearchEvents();
    // then
    assertThat(resultForQuery("Notebook1").size()).isEqualTo(1);
    List<Map<String, String>> results = resultForQuery("test");
    assertThat(results).isNotEmpty();
    assertThat(results.size()).isEqualTo(2);
    // does not include Notebook1's paragraph any more
    for (Map<String, String> result : results) {
      assertThat(result.get("id").startsWith(note1.getId())).isFalse();
    }
  }
  // Renaming a note re-indexes it under the new name and drops the old one.
  @Test
  public void indexNoteNameUpdatedOnNoteSave() throws IOException, InterruptedException {
    // given: total 2 notebooks, 3 paragraphs
    Note note1 = newNoteWithParagraph("Notebook1", "test");
    Note note2 = newNoteWithParagraphs("Notebook2", "not test", "not test at all");
    drainSearchEvents();
    assertThat(resultForQuery("test").size()).isEqualTo(3);
    // when
    note1.setName("NotebookN");
    notebook.updateNote(note1, AuthenticationInfo.ANONYMOUS);
    drainSearchEvents();
    Thread.sleep(1000);
    // then
    assertThat(resultForQuery("Notebook1")).isEmpty();
    assertThat(resultForQuery("NotebookN")).isNotEmpty();
    assertThat(resultForQuery("NotebookN").size()).isEqualTo(1);
  }
  // Shorthand for querying the service under test.
  private List<Map<String, String>> resultForQuery(String q) {
    return noteSearchService.query(q);
  }
  /**
   * Creates a new Note \w given name, adds a new paragraph \w given text
   *
   * @param noteName name of the note
   * @param parText text of the paragraph
   * @return Note
   */
  private Note newNoteWithParagraph(String noteName, String parText) throws IOException {
    Note note1 = newNote(noteName);
    addParagraphWithText(note1, parText);
    return note1;
  }
  // Same as above but the paragraph also gets a title.
  private Note newNoteWithParagraph(String noteName, String parText, String title) throws IOException {
    Note note = newNote(noteName);
    addParagraphWithTextAndTitle(note, parText, title);
    return note;
  }
  /** Creates a new Note \w given name, adds N paragraphs \w given texts */
  private Note newNoteWithParagraphs(String noteName, String... parTexts) throws IOException {
    Note note1 = newNote(noteName);
    for (String parText : parTexts) {
      addParagraphWithText(note1, parText);
    }
    return note1;
  }
  // Appends a paragraph with the given body text to the note.
  private Paragraph addParagraphWithText(Note note, String text) {
    Paragraph p = note.addNewParagraph(AuthenticationInfo.ANONYMOUS);
    p.setText(text);
    return p;
  }
  // Appends a paragraph with both body text and a title to the note.
  private Paragraph addParagraphWithTextAndTitle(Note note, String text, String title) {
    Paragraph p = note.addNewParagraph(AuthenticationInfo.ANONYMOUS);
    p.setText(text);
    p.setTitle(title);
    return p;
  }
  // Creates a note via the Notebook under the anonymous user.
  private Note newNote(String name) throws IOException {
    return notebook.createNote(name, AuthenticationInfo.ANONYMOUS);
  }
}
| |
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is the FRIL Framework.
*
* The Initial Developers of the Original Code are
* The Department of Math and Computer Science, Emory University and
* The Centers for Disease Control and Prevention.
* Portions created by the Initial Developer are Copyright (C) 2008
* the Initial Developer. All Rights Reserved.
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package cdc.datamodel.converters.ui;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import javax.swing.ButtonGroup;
import javax.swing.JCheckBox;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.JSpinner;
import javax.swing.SpinnerNumberModel;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import cdc.gui.components.dynamicanalysis.ChangedConfigurationListener;
import cdc.gui.components.paramspanel.ParamPanelField;
/**
 * Parameter-panel field for configuring a trim converter. The user can enable trimming and
 * choose to either CUT the first N characters or LEAVE only the first N characters.
 *
 * <p>The field value is serialized as {@code "<enabled>,<option>,<length>"} where enabled is
 * 0/1, option is {@link #OPTION_CUT} or {@link #OPTION_LEAVE}, and length is the character
 * count shown in the active spinner.
 */
public class TrimConverterField extends ParamPanelField {

	/** Option: cut characters. */
	public static final int OPTION_CUT = 1;
	/** Option: leave characters. */
	public static final int OPTION_LEAVE = 2;

	/** Adapts ItemEvents to ChangedConfigurationListener notifications. */
	public class ItemListenerProxy implements ItemListener {
		private ChangedConfigurationListener listener;
		public ItemListenerProxy(ChangedConfigurationListener propertyChangeListener) {
			this.listener = propertyChangeListener;
		}
		public void itemStateChanged(ItemEvent e) {
			listener.configurationChanged();
		}
	}

	/** Adapts ChangeEvents to ChangedConfigurationListener notifications. */
	public class ChangeListenerProxy implements ChangeListener {
		private ChangedConfigurationListener listener;
		public ChangeListenerProxy(ChangedConfigurationListener propertyChangeListener) {
			listener = propertyChangeListener;
		}
		public void stateChanged(ChangeEvent e) {
			listener.configurationChanged();
		}
	}

	/** Adapts ActionEvents to ChangedConfigurationListener notifications. */
	public class ActionListenerProxy implements ActionListener {
		private ChangedConfigurationListener listener;
		public ActionListenerProxy(ChangedConfigurationListener propertyChangeListener) {
			listener = propertyChangeListener;
		}
		public void actionPerformed(ActionEvent e) {
			listener.configurationChanged();
		}
	}

	private JCheckBox enable;
	private JRadioButton radioCut;
	private JRadioButton radioLeave;
	private ButtonGroup group;
	private JSpinner cutNumber;
	private JSpinner leaveNumber;
	private JLabel labelCut = new JLabel("Cut");
	private JLabel labelLeave = new JLabel("Leave");
	private JLabel labelCut1 = new JLabel(" characters");
	private JLabel labelLeave1 = new JLabel(" characters");
	private JLabel paramName;
	private JPanel panel;
	private String userLabel;

	/**
	 * Builds the field UI and applies the serialized default value.
	 *
	 * @param parent parent component (unused, kept for the panel-field contract)
	 * @param param parameter name (unused, kept for the panel-field contract)
	 * @param label user-visible label for the field
	 * @param defaultValue serialized "enabled,option,length" default
	 * @param listener notified on any configuration change; may be null
	 */
	public TrimConverterField(JComponent parent, String param, String label, String defaultValue, ChangedConfigurationListener listener) {
		this.userLabel = label;
		enable = new JCheckBox("Enable");
		radioCut = new JRadioButton();
		radioLeave = new JRadioButton();
		radioCut.setSelected(true);
		radioLeave.setSelected(false);
		group = new ButtonGroup();
		group.add(radioCut);
		group.add(radioLeave);
		cutNumber = new JSpinner(new SpinnerNumberModel(0, 0, 1000, 1));
		leaveNumber = new JSpinner(new SpinnerNumberModel(0, 0, 1000, 1));

		// One helper keeps every widget's enabled state in sync. The original duplicated
		// this logic in four places, and the duplicates disagreed (e.g. re-enabling the
		// field never explicitly disabled the inactive spinner).
		enable.addItemListener(new ItemListener() {
			public void itemStateChanged(ItemEvent arg0) {
				updateWidgetState();
			}
		});
		radioCut.addActionListener(new ActionListener() {
			public void actionPerformed(ActionEvent e) {
				updateWidgetState();
			}
		});
		radioLeave.addActionListener(new ActionListener() {
			public void actionPerformed(ActionEvent e) {
				updateWidgetState();
			}
		});

		paramName = new JLabel(label);
		panel = new JPanel();
		panel.setLayout(new FlowLayout());
		panel.add(enable);
		// Fix: each spacer must be a distinct component. A Swing component has exactly one
		// parent, so adding the same filler panel twice (as the original did) reparented it
		// and left only a single filler in the layout.
		panel.add(createFiller());
		panel.add(radioCut);
		panel.add(labelCut);
		panel.add(cutNumber);
		panel.add(labelCut1);
		panel.add(createFiller());
		panel.add(radioLeave);
		panel.add(labelLeave);
		panel.add(leaveNumber);
		panel.add(labelLeave1);

		setValue(defaultValue);

		if (listener != null) {
			radioCut.addItemListener(new ItemListenerProxy(listener));
			radioLeave.addItemListener(new ItemListenerProxy(listener));
			leaveNumber.addChangeListener(new ChangeListenerProxy(listener));
			cutNumber.addChangeListener(new ChangeListenerProxy(listener));
			enable.addActionListener(new ActionListenerProxy(listener));
		}
	}

	/** Creates a fixed-size spacer panel used between widget groups. */
	private JPanel createFiller() {
		JPanel filler = new JPanel();
		filler.setMinimumSize(new Dimension(40, 20));
		return filler;
	}

	/**
	 * Enables/disables the radios, spinners and labels to reflect the current state: all off
	 * when trimming is disabled, otherwise only the widgets of the selected option active.
	 */
	private void updateWidgetState() {
		boolean on = enable.isSelected();
		boolean cut = radioCut.isSelected();
		radioCut.setEnabled(on);
		radioLeave.setEnabled(on);
		cutNumber.setEnabled(on && cut);
		labelCut.setEnabled(on && cut);
		labelCut1.setEnabled(on && cut);
		leaveNumber.setEnabled(on && !cut);
		labelLeave.setEnabled(on && !cut);
		labelLeave1.setEnabled(on && !cut);
	}

	public void addConfigurationChangeListener(ChangedConfigurationListener configurationListener) {
		throw new RuntimeException("Not implemented");
	}

	public void error(String message) {
		// Validation errors are not surfaced by this field.
	}

	public JComponent getComponentInputField() {
		return panel;
	}

	public JComponent getComponentLabel() {
		return paramName;
	}

	public String getUserLabel() {
		return userLabel;
	}

	/** Serializes the current state as "enabled,option,length". */
	public String getValue() {
		return (enable.isSelected() ? "1" : "0") + "," +
			(radioCut.isSelected() ? OPTION_CUT : OPTION_LEAVE) + "," +
			(radioCut.isSelected() ? cutNumber.getValue() : leaveNumber.getValue());
	}

	/**
	 * Restores state from a serialized "enabled,option,length" string.
	 *
	 * <p>Fix: the option radio is now selected even when the field is disabled, so that
	 * setValue/getValue round-trips; the original discarded the stored option for disabled
	 * values and getValue() would then report OPTION_CUT regardless.
	 */
	public void setValue(String val) {
		String[] values = val.split(",");
		enable.setSelected(Integer.parseInt(values[0]) != 0);
		int option = Integer.parseInt(values[1]);
		int length = Integer.parseInt(values[2]);
		// Integer.valueOf replaces the deprecated new Integer(int) constructor.
		cutNumber.setValue(Integer.valueOf(length));
		leaveNumber.setValue(Integer.valueOf(length));
		if (option == OPTION_CUT) {
			radioCut.setSelected(true);
		} else {
			radioLeave.setSelected(true);
		}
		updateWidgetState();
	}

	public void removeConfigurationChangeListener(ChangedConfigurationListener configurationListener) {
		throw new RuntimeException("Not implemented");
	}
}
| |
/*
* ******************************************************************************
* Copyright 2014-2019 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ****************************************************************************
*/
// This code is auto-generated, do not modify
package com.spectralogic.ds3client.commands.spectrads3;
import com.spectralogic.ds3client.networking.HttpVerb;
import com.spectralogic.ds3client.commands.interfaces.AbstractPaginationRequest;
import java.util.UUID;
import com.google.common.net.UrlEscapers;
import com.spectralogic.ds3client.models.StorageDomainMemberState;
import com.spectralogic.ds3client.models.WritePreferenceLevel;
/**
 * Request listing Storage Domain Members via the Spectra S3 REST API
 * ({@code GET /_rest_/storage_domain_member}).
 *
 * <p>Each fluent "with" setter stores the value locally (for the getter) and mirrors it into
 * the request's query parameters. This class is auto-generated (see file header) — keep the
 * generated structure intact.
 */
public class GetStorageDomainMembersSpectraS3Request extends AbstractPaginationRequest {

    // Variables
    private boolean lastPage;
    private int pageLength;
    private int pageOffset;
    private String pageStartMarker;
    private String poolPartitionId;
    private StorageDomainMemberState state;
    private String storageDomainId;
    private String tapePartitionId;
    private String tapeType;
    private WritePreferenceLevel writePreference;
    // Constructor
    public GetStorageDomainMembersSpectraS3Request() {
    }
    // "last_page" is a flag parameter: present with a null value when set, absent otherwise.
    public GetStorageDomainMembersSpectraS3Request withLastPage(final boolean lastPage) {
        this.lastPage = lastPage;
        if (this.lastPage) {
            this.getQueryParams().put("last_page", null);
        } else {
            this.getQueryParams().remove("last_page");
        }
        return this;
    }
    public GetStorageDomainMembersSpectraS3Request withPageLength(final int pageLength) {
        this.pageLength = pageLength;
        this.updateQueryParam("page_length", pageLength);
        return this;
    }
    public GetStorageDomainMembersSpectraS3Request withPageOffset(final int pageOffset) {
        this.pageOffset = pageOffset;
        this.updateQueryParam("page_offset", pageOffset);
        return this;
    }
    // UUID overload: stored as its string form; updateQueryParam receives the UUID itself.
    public GetStorageDomainMembersSpectraS3Request withPageStartMarker(final UUID pageStartMarker) {
        this.pageStartMarker = pageStartMarker.toString();
        this.updateQueryParam("page_start_marker", pageStartMarker);
        return this;
    }
    public GetStorageDomainMembersSpectraS3Request withPageStartMarker(final String pageStartMarker) {
        this.pageStartMarker = pageStartMarker;
        this.updateQueryParam("page_start_marker", pageStartMarker);
        return this;
    }
    public GetStorageDomainMembersSpectraS3Request withPoolPartitionId(final UUID poolPartitionId) {
        this.poolPartitionId = poolPartitionId.toString();
        this.updateQueryParam("pool_partition_id", poolPartitionId);
        return this;
    }
    public GetStorageDomainMembersSpectraS3Request withPoolPartitionId(final String poolPartitionId) {
        this.poolPartitionId = poolPartitionId;
        this.updateQueryParam("pool_partition_id", poolPartitionId);
        return this;
    }
    public GetStorageDomainMembersSpectraS3Request withState(final StorageDomainMemberState state) {
        this.state = state;
        this.updateQueryParam("state", state);
        return this;
    }
    public GetStorageDomainMembersSpectraS3Request withStorageDomainId(final UUID storageDomainId) {
        this.storageDomainId = storageDomainId.toString();
        this.updateQueryParam("storage_domain_id", storageDomainId);
        return this;
    }
    public GetStorageDomainMembersSpectraS3Request withStorageDomainId(final String storageDomainId) {
        this.storageDomainId = storageDomainId;
        this.updateQueryParam("storage_domain_id", storageDomainId);
        return this;
    }
    public GetStorageDomainMembersSpectraS3Request withTapePartitionId(final UUID tapePartitionId) {
        this.tapePartitionId = tapePartitionId.toString();
        this.updateQueryParam("tape_partition_id", tapePartitionId);
        return this;
    }
    public GetStorageDomainMembersSpectraS3Request withTapePartitionId(final String tapePartitionId) {
        this.tapePartitionId = tapePartitionId;
        this.updateQueryParam("tape_partition_id", tapePartitionId);
        return this;
    }
    public GetStorageDomainMembersSpectraS3Request withTapeType(final String tapeType) {
        this.tapeType = tapeType;
        this.updateQueryParam("tape_type", tapeType);
        return this;
    }
    public GetStorageDomainMembersSpectraS3Request withWritePreference(final WritePreferenceLevel writePreference) {
        this.writePreference = writePreference;
        this.updateQueryParam("write_preference", writePreference);
        return this;
    }
    @Override
    public HttpVerb getVerb() {
        return HttpVerb.GET;
    }
    @Override
    public String getPath() {
        return "/_rest_/storage_domain_member";
    }
    // Plain accessors for the recorded request parameters.
    public boolean getLastPage() {
        return this.lastPage;
    }
    public int getPageLength() {
        return this.pageLength;
    }
    public int getPageOffset() {
        return this.pageOffset;
    }
    public String getPageStartMarker() {
        return this.pageStartMarker;
    }
    public String getPoolPartitionId() {
        return this.poolPartitionId;
    }
    public StorageDomainMemberState getState() {
        return this.state;
    }
    public String getStorageDomainId() {
        return this.storageDomainId;
    }
    public String getTapePartitionId() {
        return this.tapePartitionId;
    }
    public String getTapeType() {
        return this.tapeType;
    }
    public WritePreferenceLevel getWritePreference() {
        return this.writePreference;
    }
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.cx.v3beta1;
import com.google.api.pathtemplate.PathTemplate;
import com.google.api.resourcenames.ResourceName;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
@Generated("by gapic-generator-java")
public class FlowValidationResultName implements ResourceName {
private static final PathTemplate PROJECT_LOCATION_AGENT_FLOW =
PathTemplate.createWithoutUrlEncoding(
"projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/validationResult");
private volatile Map<String, String> fieldValuesMap;
private final String project;
private final String location;
private final String agent;
private final String flow;
  // All-null instance; deprecated — presumably retained for reflective/framework
  // instantiation in generated code (TODO confirm). Use newBuilder()/of() instead.
  @Deprecated
  protected FlowValidationResultName() {
    project = null;
    location = null;
    agent = null;
    flow = null;
  }
  // Builder-based constructor: all four path segments are required and non-null
  // (checkNotNull throws NullPointerException otherwise).
  private FlowValidationResultName(Builder builder) {
    project = Preconditions.checkNotNull(builder.getProject());
    location = Preconditions.checkNotNull(builder.getLocation());
    agent = Preconditions.checkNotNull(builder.getAgent());
    flow = Preconditions.checkNotNull(builder.getFlow());
  }
  /** Returns the project path segment. */
  public String getProject() {
    return project;
  }
  /** Returns the location path segment. */
  public String getLocation() {
    return location;
  }
  /** Returns the agent path segment. */
  public String getAgent() {
    return agent;
  }
  /** Returns the flow path segment. */
  public String getFlow() {
    return flow;
  }
  /** Returns an empty builder for this resource name. */
  public static Builder newBuilder() {
    return new Builder();
  }
  /** Returns a builder pre-populated with this instance's segments. */
  public Builder toBuilder() {
    return new Builder(this);
  }
  /** Creates a FlowValidationResultName from the four required path segments. */
  public static FlowValidationResultName of(
      String project, String location, String agent, String flow) {
    return newBuilder()
        .setProject(project)
        .setLocation(location)
        .setAgent(agent)
        .setFlow(flow)
        .build();
  }
public static String format(String project, String location, String agent, String flow) {
return newBuilder()
.setProject(project)
.setLocation(location)
.setAgent(agent)
.setFlow(flow)
.build()
.toString();
}
  /**
   * Parses a formatted resource-name string back into its segments.
   *
   * <p>Returns null for an empty input; throws (via validatedMatch) when the string does not
   * match the expected path template.
   */
  public static FlowValidationResultName parse(String formattedString) {
    if (formattedString.isEmpty()) {
      return null;
    }
    Map<String, String> matchMap =
        PROJECT_LOCATION_AGENT_FLOW.validatedMatch(
            formattedString, "FlowValidationResultName.parse: formattedString not in valid format");
    return of(
        matchMap.get("project"),
        matchMap.get("location"),
        matchMap.get("agent"),
        matchMap.get("flow"));
  }
public static List<FlowValidationResultName> parseList(List<String> formattedStrings) {
List<FlowValidationResultName> list = new ArrayList<>(formattedStrings.size());
for (String formattedString : formattedStrings) {
list.add(parse(formattedString));
}
return list;
}
public static List<String> toStringList(List<FlowValidationResultName> values) {
List<String> list = new ArrayList<>(values.size());
for (FlowValidationResultName value : values) {
if (value == null) {
list.add("");
} else {
list.add(value.toString());
}
}
return list;
}
  /** Returns true if {@code formattedString} matches this resource name's path template. */
  public static boolean isParsableFrom(String formattedString) {
    return PROJECT_LOCATION_AGENT_FLOW.matches(formattedString);
  }
  /**
   * Returns a map from path-component name ("project", "location", "agent",
   * "flow") to its value, built lazily on first call. Uses double-checked
   * locking on the volatile fieldValuesMap so concurrent callers observe a
   * fully-constructed immutable map; do not reorder these checks.
   */
  @Override
  public Map<String, String> getFieldValuesMap() {
    if (fieldValuesMap == null) {
      synchronized (this) {
        if (fieldValuesMap == null) {
          ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
          // Null components (possible only via the deprecated no-arg
          // constructor) are simply omitted from the map.
          if (project != null) {
            fieldMapBuilder.put("project", project);
          }
          if (location != null) {
            fieldMapBuilder.put("location", location);
          }
          if (agent != null) {
            fieldMapBuilder.put("agent", agent);
          }
          if (flow != null) {
            fieldMapBuilder.put("flow", flow);
          }
          fieldValuesMap = fieldMapBuilder.build();
        }
      }
    }
    return fieldValuesMap;
  }
  /** Returns a single component by name, or null if the name is unknown. */
  public String getFieldValue(String fieldName) {
    return getFieldValuesMap().get(fieldName);
  }
  /** Formats this name as its canonical resource-name string. */
  @Override
  public String toString() {
    return PROJECT_LOCATION_AGENT_FLOW.instantiate(
        "project", project, "location", location, "agent", agent, "flow", flow);
  }
@Override
public boolean equals(Object o) {
if (o == this) {
return true;
}
if (o != null || getClass() == o.getClass()) {
FlowValidationResultName that = ((FlowValidationResultName) o);
return Objects.equals(this.project, that.project)
&& Objects.equals(this.location, that.location)
&& Objects.equals(this.agent, that.agent)
&& Objects.equals(this.flow, that.flow);
}
return false;
}
  /**
   * Hash consistent with equals(): folds the four components with the standard
   * generated 1000003 multiply/xor scheme.
   */
  @Override
  public int hashCode() {
    int h = 1;
    h *= 1000003;
    h ^= Objects.hashCode(project);
    h *= 1000003;
    h ^= Objects.hashCode(location);
    h *= 1000003;
    h ^= Objects.hashCode(agent);
    h *= 1000003;
    h ^= Objects.hashCode(flow);
    return h;
  }
  /**
   * Builder for
   * projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/validationResult.
   * All four components must be set before {@link #build()} (enforced by the
   * FlowValidationResultName constructor).
   */
  public static class Builder {
    private String project;
    private String location;
    private String agent;
    private String flow;
    protected Builder() {}
    public String getProject() {
      return project;
    }
    public String getLocation() {
      return location;
    }
    public String getAgent() {
      return agent;
    }
    public String getFlow() {
      return flow;
    }
    public Builder setProject(String project) {
      this.project = project;
      return this;
    }
    public Builder setLocation(String location) {
      this.location = location;
      return this;
    }
    public Builder setAgent(String agent) {
      this.agent = agent;
      return this;
    }
    public Builder setFlow(String flow) {
      this.flow = flow;
      return this;
    }
    // Copy constructor used by toBuilder().
    private Builder(FlowValidationResultName flowValidationResultName) {
      this.project = flowValidationResultName.project;
      this.location = flowValidationResultName.location;
      this.agent = flowValidationResultName.agent;
      this.flow = flowValidationResultName.flow;
    }
    public FlowValidationResultName build() {
      return new FlowValidationResultName(this);
    }
  }
}
| |
package org.workcraft.plugins.cpog.untangling;
import org.workcraft.dom.Connection;
import org.workcraft.dom.Container;
import org.workcraft.dom.Node;
import org.workcraft.plugins.cpog.Cpog;
import org.workcraft.plugins.cpog.VisualCpog;
import org.workcraft.plugins.cpog.VisualVertex;
import org.workcraft.plugins.cpog.VisualVertex.RenderType;
import org.workcraft.plugins.cpog.commands.PetriToCpogParameters;
import org.workcraft.plugins.cpog.untangling.UntanglingNode.NodeType;
import org.workcraft.plugins.petri.Petri;
import org.workcraft.plugins.petri.Place;
import org.workcraft.plugins.petri.Transition;
import org.workcraft.plugins.petri.VisualPetri;
import java.awt.geom.Point2D;
import java.util.ArrayList;
import java.util.LinkedHashMap;
/**
 * Converts a Petri net into a Conditional Partial Order Graph (CPOG) via its
 * untangling: the net's places, transitions, arcs and tokens are streamed into
 * an {@code Untanglings} instance, converted, and the resulting partial orders
 * are laid out and grouped inside a fresh {@link VisualCpog}.
 */
public class PetriToCpogConverter {
    // Source Petri net (the math model behind the visual net given at construction).
    private final Petri pn;
    // Target visual CPOG populated by buildCpog().
    private final VisualCpog visualCpog;
    // Layout helper: x coordinate of the rightmost vertex placed so far; -1 = none yet.
    private int xRightmostVertex;
    /** Creates a converter reading from the given visual Petri net. */
    public PetriToCpogConverter(VisualPetri vpn) {
        this.pn = vpn.getMathModel();
        this.visualCpog = new VisualCpog(new Cpog());
        this.xRightmostVertex = -1;
    }
    /**
     * Performs the Petri-net-to-CPOG conversion.
     *
     * @param settings conversion parameters forwarded to the untangling
     * @return the populated visual CPOG, or {@code null} if token insertion or
     *         the untangling conversion fails
     */
    public VisualCpog run(PetriToCpogParameters settings) {
        Untanglings untangling = new Untanglings(settings);
        /*****************************************************
         * Unpack Petri net and stream it into the converter *
         *****************************************************/
        // insert places
        for (Place p : pn.getPlaces()) {
            untangling.addPlace(pn.getNodeReference(p));
        }
        // insert transitions
        for (Transition t : pn.getTransitions()) {
            untangling.addTransition(pn.getNodeReference(t));
        }
        // insert connections (place -> transition arcs)
        for (Place p : pn.getPlaces()) {
            for (Node n : pn.getPostset(p)) {
                if (n instanceof Transition) {
                    Transition t = (Transition) n;
                    untangling.placeToTransition(pn.getNodeReference(p), pn.getNodeReference(t));
                    // debug printing
                    // System.out.println(pn.getNodeReference(p) + " -> " + pn.getNodeReference(t));
                }
            }
        }
        // insert connections (transition -> place arcs)
        for (Transition t : pn.getTransitions()) {
            for (Node n : pn.getPostset(t)) {
                if (n instanceof Place) {
                    Place p = (Place) n;
                    untangling.transitionToPlace(pn.getNodeReference(t), pn.getNodeReference(p));
                    // debug printing
                    // System.out.println(pn.getNodeReference(t) + " -> " + pn.getNodeReference(p));
                }
            }
        }
        // insert tokens (initial marking); abort if a place cannot be resolved
        for (Place p : pn.getPlaces()) {
            if (p.getTokens() > 0) {
                if (!untangling.insertTokens(pn.getNodeReference(p), p.getTokens())) {
                    System.out.println("Place with this name not found. Tokens not inserted");
                    return null;
                }
            }
        }
        /*****************************************************
         * Convert the Petri net into a Cpog                 *
         *****************************************************/
        // start conversion from Petri net to Cpog
        if (!untangling.startConversion()) {
            return null;
        }
        // getting the partial orders from the untangling
        ArrayList<PartialOrder> partialOrders = untangling.getPartialOrders(settings);
        // building the cpog from the partial orders
        buildCpog(partialOrders);
        return visualCpog;
    }
    /**
     * Builds the visual CPOG from the partial orders: creates one grouped
     * subgraph per partial order, stacking groups vertically (10 units apart)
     * and re-laying out each one by causality with {@link #visitGraph}.
     */
    @SuppressWarnings("deprecation")
    private void buildCpog(ArrayList<PartialOrder> partialOrders) {
        // Positions inside the workspace
        int xPos = 0;
        int yPos = 0;
        int i = 0;
        // looping over partial orders
        for (PartialOrder po : partialOrders) {
            i++;
            // move the vertically every partial order
            xPos = 0;
            yPos = i * 10;
            // debug printing: number of the partial order
            // System.out.println("Partial Order " + (i + 1) + ":");
            // label -> vertex map used to deduplicate vertices within this partial order
            LinkedHashMap<String, VisualVertex> nodes = new LinkedHashMap<>();
            Container container = visualCpog.getCurrentLevel();
            visualCpog.selectNone();
            // looping over the edges
            for (UntanglingEdge edge : po) {
                // creating source vertex (places render as circles, transitions as squares)
                UntanglingNode sourceNode = edge.getFirst();
                VisualVertex sourceVertex = visualCpog.createVisualVertex(container);
                String sourceName = sourceNode.getLabel();
                sourceVertex.setLabel(sourceName);
                if (sourceNode.getType() == NodeType.PLACE) {
                    sourceVertex.setRenderType(RenderType.CIRCLE);
                } else {
                    sourceVertex.setRenderType(RenderType.SQUARE);
                }
                // creating target vertex
                UntanglingNode targetNode = edge.getSecond();
                VisualVertex targetVertex = visualCpog.createVisualVertex(container);
                String targetName = targetNode.getLabel();
                targetVertex.setLabel(targetName);
                if (targetNode.getType() == NodeType.PLACE) {
                    targetVertex.setRenderType(RenderType.CIRCLE);
                } else {
                    targetVertex.setRenderType(RenderType.SQUARE);
                }
                // checking if they are already present
                // if so do not create a new one but connect
                // the one already available
                if (nodes.containsKey(sourceName)) {
                    sourceVertex = nodes.get(sourceName);
                } else {
                    sourceVertex.setPosition(new Point2D.Double(xPos, yPos));
                    xPos = xPos + 5;
                    nodes.put(sourceName, sourceVertex);
                }
                if (nodes.containsKey(targetName)) {
                    targetVertex = nodes.get(targetName);
                } else {
                    targetVertex.setPosition(new Point2D.Double(xPos, yPos));
                    xPos = xPos + 5;
                    nodes.put(targetName, targetVertex);
                }
                // connection
                visualCpog.connect(sourceVertex, targetVertex);
                // debug: printing partial order
                // System.out.println(source.getLabel() + " -> " + target.getLabel());
            }
            // removing the duplicates: vertices created above but superseded by a
            // previously-registered one end up with no connections, so sweeping
            // unconnected vertices deletes exactly those duplicates
            for (VisualVertex v : visualCpog.getVertices(visualCpog.getRoot())) {
                if (visualCpog.getConnections(v).isEmpty()) {
                    visualCpog.removeWithoutNotify(v);
                }
            }
            //rearrange vertices: start from a vertex with an empty preset (a root)
            VisualVertex first = null;
            for (VisualVertex v : visualCpog.getVertices(visualCpog.getRoot())) {
                if (visualCpog.getPreset(v).isEmpty()) {
                    first = v;
                    break;
                }
            }
            xPos = 0;
            yPos = i * 10;
            xRightmostVertex = -1;
            visitGraph(first, xPos, yPos);
            // renaming the partial order
            // NOTE(review): i is incremented at the top of the loop, so the first
            // group is labelled "Partial Order 2" -- the "+ 1" looks written for a
            // zero-based counter; confirm the intended numbering.
            visualCpog.selectAll();
            visualCpog.groupSelection("Partial Order " + (i + 1));
        }
    }
    /**
     * Recursively lays out the graph by causality: each vertex is placed 5 units
     * right of its predecessor, branches fan out vertically, and a sink vertex
     * with multiple predecessors is pushed right of the rightmost one.
     */
    private void visitGraph(VisualVertex vertex, int xPos, int yPos) {
        // termination condition:
        // if node has no more connection the path
        // has been visited
        if (visualCpog.getPostset(vertex).isEmpty()) {
            // last vertex must be place after all the other ones
            xRightmostVertex = xRightmostVertex + 5;
            int numberPreConnections = visualCpog.getConnections(vertex).size();
            // if last vertex has got more than one pre vertices
            // it must be placed on the right of the rightmost
            // pre vertex
            if (numberPreConnections > 1) {
                xPos = xRightmostVertex;
            }
            // set position of last vertex
            vertex.setPosition(new Point2D.Double(xPos, yPos));
            return;
        }
        // counting postSet of a vertex (outgoing connections only)
        int numberPostConnections = 0;
        for (Connection connection : visualCpog.getConnections(vertex)) {
            if (connection.getFirst().equals(vertex)) {
                numberPostConnections++;
            }
        }
        // variables for the rearranging the positions
        int x = xPos;
        int y = yPos;
        int nc = 0;
        // track the furthest-right column reached so far
        if (xRightmostVertex < x) {
            xRightmostVertex = x;
        }
        // looping over the post vertices
        for (Connection connection : visualCpog.getConnections(vertex)) {
            if (connection.getFirst().equals(vertex)) {
                // number of post vertices
                nc++;
                // select the post vertex
                VisualVertex postVertex = (VisualVertex) connection.getSecond();
                // set the position to current vertex
                vertex.setPosition(new Point2D.Double(x, y));
                // increment horizontal position
                x = x + 5;
                // if more post vertices exist, modify the vertical
                // position of them (next vertices)
                if (numberPostConnections > 1 && nc == 1) {
                    y = y - numberPostConnections;
                } else {
                    if (numberPostConnections != 1) {
                        for (int j = 0; j < nc; j++) {
                            y = y + numberPostConnections;
                        }
                    }
                }
                // call recursively the function
                visitGraph(postVertex, x, y);
                // backtrack in case of more post vertices
                x = x - 5;
            }
        }
    }
}
| |
package com.lucidworks.hadoop.ingest;
import com.lucidworks.hadoop.cache.DistributedCacheHandler;
import com.lucidworks.hadoop.ingest.util.GrokHelper;
import com.lucidworks.hadoop.io.LWDocument;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
import org.jruby.RubyHash;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.script.ScriptContext;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Ingest mapper that parses raw log lines with Logstash/Grok filters, executed
 * through an embedded JRuby (JSR-223) script engine, and emits one LWDocument
 * per matched line.
 *
 * <p>Configuration: {@code -Dgrok.uri} points at the Grok config file, which is
 * cached into the distributed cache under {@code grok.config.path}; optional
 * extra pattern files arrive via {@code grok.additional.patterns}
 * (semicolon-separated cache paths).
 */
public class GrokIngestMapper extends AbstractIngestMapper<LongWritable, Text> {
  // static final: loggers are shared per-class; 'transient' on the original
  // static field was meaningless and has been dropped.
  private static final Logger log = LoggerFactory.getLogger(GrokIngestMapper.class);
  public static final String GROK_URI = "grok.uri";
  public static final String GROK_CONFIG_PATH = "grok.config.path";
  public static final String ADDITIONAL_PATTERNS = "grok.additional.patterns";
  public static final String LOG_RUBY_PARAM = "log";
  public static final String CONFIG_STRING_RUBY_PARAM = "config_string_param";
  public static final String ADDITIONAL_PATTERNS_RUBY_PARAM = "additional_patterns";
  public static final String FILTERS_ARRAY_RUBY_PARAM = "filters";
  public static final String LOADER_RUBY_CLASS = "logstash-mapper/loader.rb";
  public static final String MATCHER_RUBY_CLASS = "logstash-mapper/matcher.rb";
  public static final String PATTERN_HANDLER_RUBY_CLASS = "logstash-mapper/pattern_handler.rb";
  public static final String PATH_FIELD_NAME = "path";
  public static final String BYTE_OFFSET_FIELD_NAME = "byte_offset";
  // Compiled filter chain returned by the Ruby loader script; passed back to
  // the matcher script for every log line.
  private Object filters;
  private final AbstractJobFixture fixture = new AbstractJobFixture() {
    @Override
    public void init(JobConf conf) throws IOException {
      fillDistributeCache(conf);
    }
  };

  /**
   * Loads the Grok configuration and any additional pattern files from the
   * distributed cache, then runs the Ruby loader script to compile them into
   * the filter chain used by {@link #toDocuments}.
   *
   * @throws RuntimeException if the configuration is missing or the loader
   *     script yields no filters
   */
  @Override
  public void configure(JobConf conf) {
    super.configure(conf);
    String configurationString = DistributedCacheHandler.getFileFromCache(conf, GROK_CONFIG_PATH);
    if (configurationString == null || configurationString.isEmpty()) {
      throw new RuntimeException("Grok configuration not found at: " + conf.get(GROK_CONFIG_PATH) + " and URI: " +
          conf.get(GROK_URI));
    }
    // Concatenate the contents of every additional pattern file (';'-separated paths).
    StringBuilder additionalPatternsParam = new StringBuilder();
    String additionalPatternsPaths = conf.get(ADDITIONAL_PATTERNS);
    if (additionalPatternsPaths != null) {
      for (String currentPath : additionalPatternsPaths.split(";")) {
        additionalPatternsParam.append(DistributedCacheHandler.getFileFromCache(conf, currentPath));
      }
    }
    Map<String, Object> params = new HashMap<String, Object>();
    params.put(CONFIG_STRING_RUBY_PARAM, configurationString);
    params.put(ADDITIONAL_PATTERNS_RUBY_PARAM, additionalPatternsParam.toString().trim());
    List<String> toRemove = new ArrayList<String>();
    toRemove.add(CONFIG_STRING_RUBY_PARAM);
    toRemove.add(ADDITIONAL_PATTERNS_RUBY_PARAM);
    Object response = executeScript(LOADER_RUBY_CLASS, params, toRemove);
    if (response == null) {
      throw new RuntimeException("Filters are null");
    }
    filters = response;
  }

  @Override
  public AbstractJobFixture getFixture() {
    return fixture;
  }

  /**
   * Runs the matcher script against one log line and converts the resulting
   * Ruby hash into a single LWDocument, annotated with the source file path and
   * byte offset. Returns null when the matcher produced nothing.
   */
  @Override
  protected LWDocument[] toDocuments(LongWritable key, Text value, Reporter reporter,
      Configuration conf) throws IOException {
    Map<String, Object> params = new HashMap<String, Object>();
    params.put(LOG_RUBY_PARAM, value.toString());
    params.put(FILTERS_ARRAY_RUBY_PARAM, filters);
    List<String> toRemoveList = new ArrayList<String>();
    toRemoveList.add(LOG_RUBY_PARAM);
    toRemoveList.add(FILTERS_ARRAY_RUBY_PARAM);
    Object response = executeScript(MATCHER_RUBY_CLASS, params, toRemoveList);
    // No match for this line: nothing to emit (null check now precedes the cast).
    if (response == null) {
      return null;
    }
    try {
      RubyHash hash = (RubyHash) response;
      Set<String> keys = hash.keySet();
      LWDocument document = createDocument();
      for (String currentKey : keys) {
        document.addField(currentKey, hash.get(currentKey));
      }
      // Provenance: the file this log line came from and its byte offset.
      FileSplit fileSplit = (FileSplit) reporter.getInputSplit();
      String originalLogFilePath = fileSplit.getPath().toUri().getPath();
      document.addField(PATH_FIELD_NAME, originalLogFilePath);
      document.addField(BYTE_OFFSET_FIELD_NAME, key.toString());
      // ID is path + offset + timestamp, unique per mapper invocation.
      document.setId(originalLogFilePath + "-" + key.toString() + "-" + System.currentTimeMillis());
      return new LWDocument[] {document};
    } catch (Exception e) {
      // Fixed: keep the original exception as the cause instead of discarding
      // the stack trace and logging only the message.
      log.error("Error mapping log line to document", e);
      throw new RuntimeException("Error executing ruby script", e);
    }
  }

  /**
   * Evaluates a bundled Ruby script with the given parameters bound into the
   * engine scope. Attributes listed in {@code attributesToRemove} are bound at
   * ENGINE_SCOPE only (not globally), to limit their visibility to this engine.
   *
   * @return whatever the script evaluates to
   * @throws RuntimeException if the engine or the script resource is
   *     unavailable, or if evaluation fails (with the cause attached)
   */
  public static Object executeScript(String resourcePath, Map<String, Object> params,
      List<String> attributesToRemove) {
    ScriptEngineManager manager = new ScriptEngineManager();
    ScriptEngine engine = manager.getEngineByName("ruby");
    // Fixed: validate engine/resource BEFORE use -- the original dereferenced
    // engine.getContext() first, so a missing engine caused an NPE and the
    // null checks below it were dead code.
    if (engine == null) {
      throw new RuntimeException("Script engine can not be created");
    }
    InputStream resource = GrokIngestMapper.class.getClassLoader().getResourceAsStream(resourcePath);
    if (resource == null) {
      throw new RuntimeException("Resource not found " + resourcePath);
    }
    for (String toRemove : attributesToRemove) {
      engine.getContext().setAttribute(toRemove, params.get(toRemove),
          ScriptContext.ENGINE_SCOPE);// necessary limit the scope to just engine
    }
    for (Map.Entry<String, Object> entry : params.entrySet()) {
      manager.put(entry.getKey(), entry.getValue());
    }
    // Fixed: close the reader (and underlying stream) -- the original leaked it.
    // NOTE(review): the reader uses the platform default charset, as before;
    // presumably the bundled scripts are ASCII -- confirm before pinning UTF-8.
    try (InputStreamReader is = new InputStreamReader(resource)) {
      return engine.eval(is);
    } catch (Exception e) {
      log.error("Error executing script: " + e.getMessage(), e);
      throw new RuntimeException("Error executing ruby script", e);
    }
  }

  /**
   * Caches the Grok configuration (from -Dgrok.uri) into the distributed cache
   * and registers any patterns_dir entries the configuration references.
   *
   * @throws RuntimeException if grok.uri was not supplied
   */
  private static void fillDistributeCache(JobConf conf) throws RuntimeException {
    String grokURI = conf.get(GROK_URI, null);
    if (grokURI == null) {
      throw new RuntimeException("You must specify the -D" + GROK_URI);
    }
    try {
      DistributedCacheHandler.addFileToCache(conf, new Path(grokURI), GROK_CONFIG_PATH);
    } catch (Exception e) {
      // Best-effort; log through SLF4J (was System.out) with the cause attached.
      log.error("Error caching grok configuration file", e);
    }
    // To find patterns_dir
    String configuration = GrokHelper.readConfiguration(grokURI, conf);
    handlePatternDir(conf, configuration);
  }

  /** Runs the pattern-handler script and caches any patterns_dir it reports. */
  private static void handlePatternDir(JobConf conf, String configuration) {
    Map<String, Object> params = new HashMap<String, Object>();
    params.put(CONFIG_STRING_RUBY_PARAM, configuration);
    Object response = executeScript(PATTERN_HANDLER_RUBY_CLASS, params, new ArrayList<String>());
    try {
      GrokHelper.addPatternDirToDC(response, conf);
    } catch (Exception e) {
      // Best-effort; log through SLF4J (was System.out) with the cause attached.
      log.error("Error caching grok additional patterns", e);
    }
  }
}
| |
package main.tut07;
import com.jogamp.newt.event.KeyEvent;
import com.jogamp.opengl.GL3;
import glm.mat.Mat4;
import glm.vec._3.Vec3;
import main.framework.Framework;
import main.framework.component.Mesh;
import org.xml.sax.SAXException;
import uno.glm.MatrixStack;
import uno.glsl.Program;
import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.logging.Level;
import java.util.logging.Logger;
import static com.jogamp.opengl.GL.*;
import static com.jogamp.opengl.GL2ES3.GL_COLOR;
import static com.jogamp.opengl.GL2ES3.GL_DEPTH;
import static com.jogamp.opengl.GL3.GL_DEPTH_CLAMP;
import static glm.GlmKt.glm;
/**
* @author gbarbieri
*/
public class WorldScene extends Framework {
    /** Entry point: creates the scene and starts the framework window. */
    public static void main(String[] args) {
        new WorldScene().setup("Tutorial 07 - World Scene");
    }
    // Index constants into meshes[] / MESHES_SOURCE (MAX is the array length).
    private interface MESH {
        int CONE = 0;
        int CYLINDER = 1;
        int CUBE_TINT = 2;
        int CUBE_COLOR = 3;
        int PLANE = 4;
        int MAX = 5;
    }
    // XML mesh definitions, in the same order as the MESH.* indices.
    private final String[] MESHES_SOURCE = {"UnitConeTint.xml", "UnitCylinderTint.xml", "UnitCubeTint.xml", "UnitCubeColor.xml", "UnitPlane.xml"};
    // Shader program wrappers: flat uniform color, per-vertex color, tinted per-vertex color.
    private ProgramData uniformColor, objectColor, uniformColorTint;
    private Mesh[] meshes = new Mesh[MESH.MAX];
    // Camera: spherical offset (azimuth deg, inclination deg, radius) around camTarget.
    private Vec3 sphereCamRelPos = new Vec3(67.5f, -46.0f, 150.0f), camTarget = new Vec3(0.0f, 0.4f, 0.0f);
    // When true, display() also renders a cube at the look-at point (toggled by SPACE).
    private boolean drawLookAtPoint = false;
    /**
     * One-time GL setup: compiles the three shader programs, loads all meshes,
     * and sets global state (backface culling, depth testing, depth clamping).
     */
    @Override
    public void init(GL3 gl) {
        initializeProgram(gl);
        for (int i = 0; i < MESH.MAX; i++) {
            try {
                meshes[i] = new Mesh(gl, getClass(), "tut07/" + MESHES_SOURCE[i]);
            } catch (ParserConfigurationException | SAXException | IOException | URISyntaxException ex) {
                // A failed mesh leaves meshes[i] null; rendering would then NPE.
                Logger.getLogger(WorldScene.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
        // Cull back faces; note the tutorial's meshes use clockwise front faces.
        gl.glEnable(GL_CULL_FACE);
        gl.glCullFace(GL_BACK);
        gl.glFrontFace(GL_CW);
        // Standard depth test over [0, 1]; depth clamp avoids near-plane clipping.
        gl.glEnable(GL_DEPTH_TEST);
        gl.glDepthMask(true);
        gl.glDepthFunc(GL_LEQUAL);
        gl.glDepthRangef(0.0f, 1.0f);
        gl.glEnable(GL_DEPTH_CLAMP);
    }
    // Compiles/links the three vertex/fragment shader pairs used by the scene.
    private void initializeProgram(GL3 gl) {
        uniformColor = new ProgramData(gl, "pos-only-world-transform.vert", "color-uniform.frag");
        objectColor = new ProgramData(gl, "pos-color-world-transform.vert", "color-passthrough.frag");
        uniformColorTint = new ProgramData(gl, "pos-color-world-transform.vert", "color-mult-uniform.frag");
    }
    /**
     * Per-frame render: clears buffers, uploads the shared world-to-camera
     * matrix to all three programs, then draws ground plane, forest, Parthenon,
     * and (optionally) a cube marking the camera's look-at point.
     */
    @Override
    public void display(GL3 gl) {
        // Clear color and depth.
        gl.glClearBufferfv(GL_COLOR, 0, clearColor.put(0, 0.0f).put(1, 0.0f).put(2, 0.0f).put(3, 0.0f));
        gl.glClearBufferfv(GL_DEPTH, 0, clearDepth.put(0, 1.0f));
        final Vec3 camPos = resolveCamPosition();
        // camMat: one look-at matrix shared by every program this frame.
        calcLookAtMatrix(camPos, camTarget, new Vec3(0.0f, 1.0f, 0.0f)).to(matBuffer);
        gl.glUseProgram(uniformColor.theProgram);
        gl.glUniformMatrix4fv(uniformColor.worldToCameraMatrixUnif, 1, false, matBuffer);
        gl.glUseProgram(objectColor.theProgram);
        gl.glUniformMatrix4fv(objectColor.worldToCameraMatrixUnif, 1, false, matBuffer);
        gl.glUseProgram(uniformColorTint.theProgram);
        gl.glUniformMatrix4fv(uniformColorTint.worldToCameraMatrixUnif, 1, false, matBuffer);
        gl.glUseProgram(0);
        MatrixStack modelMatrix = new MatrixStack();
        // Render the ground plane
        {
            modelMatrix
                    .push()
                    .scale(100.0f, 1.0f, 100.0f)
                    .top().to(matBuffer);
            gl.glUseProgram(uniformColor.theProgram);
            gl.glUniformMatrix4fv(uniformColor.modelToWorldMatrixUnif, 1, false, matBuffer);
            gl.glUniform4f(uniformColor.baseColorUnif, 0.302f, 0.416f, 0.0589f, 1.0f);
            meshes[MESH.PLANE].render(gl);
            gl.glUseProgram(0);
            modelMatrix.pop();
        }
        // Draw the trees
        drawForest(gl, modelMatrix);
        // Draw the building
        {
            modelMatrix
                    .push()
                    .translate(20.0f, 0.0f, -10.0f);
            drawParthenon(gl, modelMatrix);
            modelMatrix.pop();
        }
        if (drawLookAtPoint) {
            // Marker cube drawn in camera space (identity world-to-camera) with
            // depth testing off so it is always visible.
            gl.glDisable(GL_DEPTH_TEST);
            Vec3 camAimVec = camTarget.minus(camPos);
            modelMatrix
                    .push()
                    .translate(0.0f, 0.0f, -glm.length(camAimVec))
                    .scale(1.0f)
                    .top().to(matBuffer);
            gl.glUseProgram(objectColor.theProgram);
            gl.glUniformMatrix4fv(objectColor.modelToWorldMatrixUnif, 1, false, matBuffer);
            gl.glUniformMatrix4fv(objectColor.worldToCameraMatrixUnif, 1, false, new Mat4(1.0f).to(matBuffer));
            meshes[MESH.CUBE_COLOR].render(gl);
            gl.glUseProgram(0);
            modelMatrix.pop();
            gl.glEnable(GL_DEPTH_TEST);
        }
    }
private Vec3 resolveCamPosition() {
float phi = glm.radians(sphereCamRelPos.x);
float theta = glm.radians(sphereCamRelPos.y + 90.0f);
float sinTheta = glm.sin(theta);
float cosTheta = glm.cos(theta);
float cosPhi = glm.cos(phi);
float sinPhi = glm.sin(phi);
Vec3 dirToCamera = new Vec3(sinTheta * cosPhi, cosTheta, sinTheta * sinPhi);
return dirToCamera.times_(sphereCamRelPos.z).plus_(camTarget);
}
    /**
     * Builds a world-to-camera (look-at) matrix from the camera position, the
     * look-at point, and an approximate up vector.
     * NOTE(review): this relies on normalize()/negate() returning usable vectors;
     * in this glm binding the '_'-suffixed calls (transpose_, times_) are the
     * explicitly in-place ones -- confirm normalize()/negate() semantics against
     * the library before restructuring.
     */
    private Mat4 calcLookAtMatrix(Vec3 cameraPt, Vec3 lookPt, Vec3 upPt) {
        Vec3 lookDir = lookPt.minus(cameraPt).normalize();
        Vec3 upDir = upPt.normalize();
        // Orthonormal camera basis: right = look x up, perpUp = right x look.
        Vec3 rightDir = lookDir.cross(upDir).normalize();
        Vec3 perpUpDir = rightDir.cross(lookDir);
        // Rotation part: basis vectors as columns, then transposed (= inverse rotation).
        Mat4 rotMat = new Mat4(1.0f);
        rotMat.set(0, rightDir, 0.0f);
        rotMat.set(1, perpUpDir, 0.0f);
        rotMat.set(2, lookDir.negate(), 0.0f);
        rotMat.transpose_();
        // Translation part moves the camera position to the origin.
        Mat4 transMat = new Mat4(1.0f);
        transMat.set(3, cameraPt.negate(), 1.0f);
        return rotMat.times_(transMat);
    }
private void drawForest(GL3 gl, MatrixStack modelMatrix) {
for (TreeData tree : forest) {
modelMatrix
.push()
.translate(tree.xPos, 1.0f, tree.zPos);
drawTree(gl, modelMatrix, tree.trunkHeight, tree.coneHeight);
modelMatrix.pop();
}
}
    /**
     * Draws one tree at the current model-matrix origin: a brown cylinder trunk
     * of the given height topped by a green cone of the given height.
     */
    private void drawTree(GL3 gl, MatrixStack modelStack, float trunkHeight, float coneHeight) {
        // Draw trunk
        {
            modelStack.push();
            modelStack
                    .scale(1.0f, trunkHeight, 1.0f)
                    .translate(0.0f, 0.5f, 0.0f)
                    .top().to(matBuffer);
            gl.glUseProgram(uniformColorTint.theProgram);
            gl.glUniformMatrix4fv(uniformColorTint.modelToWorldMatrixUnif, 1, false, matBuffer);
            gl.glUniform4f(uniformColorTint.baseColorUnif, 0.694f, 0.4f, 0.106f, 1.0f);
            meshes[MESH.CYLINDER].render(gl);
            gl.glUseProgram(0);
            modelStack.pop();
        }
        // Draw the treetop
        {
            modelStack.push()
                    .translate(0.0f, trunkHeight, 0.0f)
                    .scale(3.0f, coneHeight, 3.0f)
                    .top().to(matBuffer);
            gl.glUseProgram(uniformColorTint.theProgram);
            gl.glUniformMatrix4fv(uniformColorTint.modelToWorldMatrixUnif, 1, false, matBuffer);
            gl.glUniform4f(uniformColorTint.baseColorUnif, 0.0f, 1.0f, 0.0f, 1.0f);
            meshes[MESH.CONE].render(gl);
            gl.glUseProgram(0);
            modelStack.pop();
        }
    }
    /**
     * Draws the Parthenon building at the current model-matrix origin: base
     * slab, top slab, a perimeter of columns, the interior block, and a tilted
     * headpiece cube.
     */
    private void drawParthenon(GL3 gl, MatrixStack modelMatrix) {
        final float parthenonWidth = 14.0f;
        final float parthenonLength = 20.0f;
        final float parthenonColumnHeight = 5.0f;
        final float parthenonBaseHeight = 1.0f;
        final float parthenonTopHeight = 2.0f;
        // Draw base
        {
            modelMatrix
                    .push()
                    .scale(parthenonWidth, parthenonBaseHeight, parthenonLength)
                    .translate(0.0f, 0.5f, 0.0f)
                    .top().to(matBuffer);
            gl.glUseProgram(uniformColorTint.theProgram);
            gl.glUniformMatrix4fv(uniformColorTint.modelToWorldMatrixUnif, 1, false, matBuffer);
            gl.glUniform4f(uniformColorTint.baseColorUnif, 0.9f, 0.9f, 0.9f, 0.9f);
            meshes[MESH.CUBE_TINT].render(gl);
            gl.glUseProgram(0);
            modelMatrix.pop();
        }
        // Draw top
        {
            modelMatrix.push()
                    .translate(0.0f, parthenonColumnHeight + parthenonBaseHeight, 0.0f)
                    .scale(parthenonWidth, parthenonTopHeight, parthenonLength)
                    .translate(0.0f, 0.5f, 0.0f)
                    .top().to(matBuffer);
            gl.glUseProgram(uniformColorTint.theProgram);
            gl.glUniformMatrix4fv(uniformColorTint.modelToWorldMatrixUnif, 1, false, matBuffer);
            gl.glUniform4f(uniformColorTint.baseColorUnif, 0.9f, 0.9f, 0.9f, 0.9f);
            meshes[MESH.CUBE_TINT].render(gl);
            gl.glUseProgram(0);
            modelMatrix.pop();
        }
        // Draw columns: front/back rows first, then the two side rows.
        final float frontZval = parthenonLength / 2.0f - 1.0f;
        final float rightXval = parthenonWidth / 2.0f - 1.0f;
        for (int iColumnNum = 0; iColumnNum < ((int) parthenonWidth / 2.0f); iColumnNum++) {
            {
                modelMatrix
                        .push()
                        .translate(2.0f * iColumnNum - parthenonWidth / 2 + 1.0f, parthenonBaseHeight, frontZval);
                drawColumn(gl, modelMatrix, parthenonColumnHeight);
                modelMatrix.pop();
            }
            {
                modelMatrix
                        .push()
                        .translate(2.0f * iColumnNum - parthenonWidth / 2.0f + 1.0f, parthenonBaseHeight, -frontZval);
                drawColumn(gl, modelMatrix, parthenonColumnHeight);
                modelMatrix.pop();
            }
        }
        //Don't draw the first or last columns, since they've been drawn already.
        for (int iColumnNum = 1; iColumnNum < ((int) ((parthenonLength - 2.0f) / 2.0f)); iColumnNum++) {
            {
                modelMatrix
                        .push()
                        .translate(rightXval, parthenonBaseHeight, 2.0f * iColumnNum - parthenonLength / 2.0f + 1.0f);
                drawColumn(gl, modelMatrix, parthenonColumnHeight);
                modelMatrix.pop();
            }
            {
                modelMatrix
                        .push()
                        .translate(-rightXval, parthenonBaseHeight, 2.0f * iColumnNum - parthenonLength / 2.0f + 1.0f);
                drawColumn(gl, modelMatrix, parthenonColumnHeight);
                modelMatrix.pop();
            }
        }
        // Draw interior
        {
            modelMatrix
                    .push()
                    .translate(0.0f, 1.0f, 0.0f)
                    .scale(parthenonWidth - 6.0f, parthenonColumnHeight, parthenonLength - 6.0f)
                    .translate(0.0f, 0.5f, 0.0f)
                    .top().to(matBuffer);
            gl.glUseProgram(objectColor.theProgram);
            gl.glUniformMatrix4fv(objectColor.modelToWorldMatrixUnif, 1, false, matBuffer);
            meshes[MESH.CUBE_COLOR].render(gl);
            gl.glUseProgram(0);
            modelMatrix.pop();
        }
        // Draw headpiece
        {
            modelMatrix
                    .push()
                    .translate(
                            0.0f,
                            parthenonColumnHeight + parthenonBaseHeight + parthenonTopHeight / 2.0f,
                            parthenonLength / 2.0f)
                    .rotateX(-135.0f)
                    .rotateY(45.0f)
                    .top().to(matBuffer);
            gl.glUseProgram(objectColor.theProgram);
            gl.glUniformMatrix4fv(objectColor.modelToWorldMatrixUnif, 1, false, matBuffer);
            meshes[MESH.CUBE_COLOR].render(gl);
            gl.glUseProgram(0);
            modelMatrix.pop();
        }
    }
    //Columns are 1x1 in the X/Z, and fHieght units in the Y.
    /**
     * Draws one column at the current model-matrix origin: a square base, a
     * square capital at the top, and a cylindrical shaft in between.
     */
    private void drawColumn(GL3 gl, MatrixStack modelMatrix, float parthenonColumnHeight) {
        final float columnBaseHeight = 0.25f;
        //Draw the bottom of the column.
        {
            modelMatrix
                    .push()
                    .scale(1.0f, columnBaseHeight, 1.0f)
                    .translate(0.0f, 0.5f, 0.0f)
                    .top().to(matBuffer);
            gl.glUseProgram(uniformColorTint.theProgram);
            gl.glUniformMatrix4fv(uniformColorTint.modelToWorldMatrixUnif, 1, false, matBuffer);
            gl.glUniform4f(uniformColorTint.baseColorUnif, 1.0f, 1.0f, 1.0f, 1.0f);
            meshes[MESH.CUBE_TINT].render(gl);
            gl.glUseProgram(0);
            modelMatrix.pop();
        }
        //Draw the top of the column.
        {
            modelMatrix
                    .push()
                    .translate(0.0f, parthenonColumnHeight - columnBaseHeight, 0.0f)
                    .scale(1.0f, columnBaseHeight, 1.0f)
                    .translate(0.0f, 0.5f, 0.0f)
                    .top().to(matBuffer);
            gl.glUseProgram(uniformColorTint.theProgram);
            gl.glUniformMatrix4fv(uniformColorTint.modelToWorldMatrixUnif, 1, false, matBuffer);
            gl.glUniform4f(uniformColorTint.baseColorUnif, 0.9f, 0.9f, 0.9f, 0.9f);
            meshes[MESH.CUBE_TINT].render(gl);
            gl.glUseProgram(0);
            modelMatrix.pop();
        }
        //Draw the main column.
        {
            modelMatrix
                    .push()
                    .translate(0.0f, columnBaseHeight, 0.0f)
                    .scale(0.8f, parthenonColumnHeight - columnBaseHeight * 2.0f, 0.8f)
                    .translate(0.0f, 0.5f, 0.0f)
                    .top().to(matBuffer);
            gl.glUseProgram(uniformColorTint.theProgram);
            gl.glUniformMatrix4fv(uniformColorTint.modelToWorldMatrixUnif, 1, false, matBuffer);
            gl.glUniform4f(uniformColorTint.baseColorUnif, 0.9f, 0.9f, 0.9f, 0.9f);
            meshes[MESH.CYLINDER].render(gl);
            gl.glUseProgram(0);
            modelMatrix.pop();
        }
    }
    /**
     * Window-resize handler: rebuilds the perspective projection for the new
     * aspect ratio, uploads it to all three programs, and resets the viewport.
     */
    @Override
    public void reshape(GL3 gl, int w, int h) {
        float zNear = 1.0f, zFar = 1000.0f;
        new MatrixStack()
                .perspective(45.0f, w / (float) h, zNear, zFar)
                .top().to(matBuffer);
        gl.glUseProgram(uniformColor.theProgram);
        gl.glUniformMatrix4fv(uniformColor.cameraToClipMatrixUnif, 1, false, matBuffer);
        gl.glUseProgram(objectColor.theProgram);
        gl.glUniformMatrix4fv(objectColor.cameraToClipMatrixUnif, 1, false, matBuffer);
        gl.glUseProgram(uniformColorTint.theProgram);
        gl.glUniformMatrix4fv(uniformColorTint.cameraToClipMatrixUnif, 1, false, matBuffer);
        gl.glUseProgram(0);
        gl.glViewport(0, 0, w, h);
    }
@Override
public void end(GL3 gl) {
gl.glDeleteProgram(uniformColor.theProgram);
gl.glDeleteProgram(objectColor.theProgram);
gl.glDeleteProgram(uniformColorTint.theProgram);
for (int i = 0; i < meshes.length; i++)
meshes[i].dispose(gl);
}
    /**
     * Keyboard controls: WASD/EQ translate the look-at target, IK/JL orbit the
     * camera (inclination/azimuth), OU change the orbit radius, SPACE toggles
     * the look-at marker, ESC quits. Holding Shift uses a 10x finer step.
     */
    @Override
    public void keyPressed(KeyEvent e) {
        switch (e.getKeyCode()) {
            case KeyEvent.VK_W:
                camTarget.z -= e.isShiftDown() ? 0.4f : 4.0f;
                break;
            case KeyEvent.VK_S:
                camTarget.z += e.isShiftDown() ? 0.4f : 4.0f;
                break;
            case KeyEvent.VK_D:
                camTarget.x += e.isShiftDown() ? 0.4f : 4.0f;
                break;
            case KeyEvent.VK_A:
                camTarget.x -= e.isShiftDown() ? 0.4f : 4.0f;
                break;
            case KeyEvent.VK_E:
                camTarget.y -= e.isShiftDown() ? 0.4f : 4.0f;
                break;
            case KeyEvent.VK_Q:
                camTarget.y += e.isShiftDown() ? 0.4f : 4.0f;
                break;
            case KeyEvent.VK_I:
                sphereCamRelPos.y -= e.isShiftDown() ? 1.125f : 11.25f;
                break;
            case KeyEvent.VK_K:
                sphereCamRelPos.y += e.isShiftDown() ? 1.125f : 11.25f;
                break;
            case KeyEvent.VK_J:
                sphereCamRelPos.x -= e.isShiftDown() ? 1.125f : 11.25f;
                break;
            case KeyEvent.VK_L:
                sphereCamRelPos.x += e.isShiftDown() ? 1.125f : 11.25f;
                break;
            case KeyEvent.VK_O:
                sphereCamRelPos.z -= e.isShiftDown() ? 1.125f : 11.25f;
                break;
            case KeyEvent.VK_U:
                sphereCamRelPos.z += e.isShiftDown() ? 1.125f : 11.25f;
                break;
            case KeyEvent.VK_SPACE:
                drawLookAtPoint = !drawLookAtPoint;
                // camTarget.print("Target"); TODO
                // sphereCamRelPos.print("Position");
                break;
            case KeyEvent.VK_ESCAPE:
                quit();
                break;
        }
        // Keep the camera above the horizon and at a sane inclination.
        sphereCamRelPos.y = glm.clamp(sphereCamRelPos.y, -78.75f, -1.0f);
        // NOTE(review): clamp(v, 0, v) and clamp(v, 5, v) pass the value itself as
        // the upper bound -- presumably intended as max(0, y) / max(5, z) floors;
        // behavior for values below the floor depends on how glm.clamp handles
        // lo > hi. TODO confirm against the glm binding.
        camTarget.y = glm.clamp(camTarget.y, 0.0f, camTarget.y);
        sphereCamRelPos.z = glm.clamp(sphereCamRelPos.z, 5.0f, sphereCamRelPos.z);
    }
    // Wraps one linked shader program plus the uniform locations the scene uses.
    class ProgramData {
        int theProgram;
        int modelToWorldMatrixUnif;
        int worldToCameraMatrixUnif;
        int cameraToClipMatrixUnif;
        // -1 for programs whose fragment shader has no "baseColor" uniform.
        int baseColorUnif;
        // Compiles and links the given tut07 vertex/fragment pair, then caches
        // the uniform locations.
        public ProgramData(GL3 gl, String vert, String frag) {
            theProgram = new Program(gl, getClass(), "tut07", vert, frag).name;
            modelToWorldMatrixUnif = gl.glGetUniformLocation(theProgram, "modelToWorldMatrix");
            worldToCameraMatrixUnif = gl.glGetUniformLocation(theProgram, "worldToCameraMatrix");
            cameraToClipMatrixUnif = gl.glGetUniformLocation(theProgram, "cameraToClipMatrix");
            baseColorUnif = gl.glGetUniformLocation(theProgram, "baseColor");
        }
    }
// Hand-placed forest layout. Each entry is (xPos, zPos, trunkHeight, coneHeight)
// in world units, matching the TreeData constructor below. Trees are listed in
// bands of roughly constant x, from west (most negative x) to east.
TreeData[] forest = {
// band around x = -45..-36
new TreeData(-45.0f, -40.0f, 2.0f, 3.0f),
new TreeData(-42.0f, -35.0f, 2.0f, 3.0f),
new TreeData(-39.0f, -29.0f, 2.0f, 4.0f),
new TreeData(-44.0f, -26.0f, 3.0f, 3.0f),
new TreeData(-40.0f, -22.0f, 2.0f, 4.0f),
new TreeData(-36.0f, -15.0f, 3.0f, 3.0f),
new TreeData(-41.0f, -11.0f, 2.0f, 3.0f),
new TreeData(-37.0f, -6.0f, 3.0f, 3.0f),
new TreeData(-45.0f, 0.0f, 2.0f, 3.0f),
new TreeData(-39.0f, 4.0f, 3.0f, 4.0f),
new TreeData(-36.0f, 8.0f, 2.0f, 3.0f),
new TreeData(-44.0f, 13.0f, 3.0f, 3.0f),
new TreeData(-42.0f, 17.0f, 2.0f, 3.0f),
new TreeData(-38.0f, 23.0f, 3.0f, 4.0f),
new TreeData(-41.0f, 27.0f, 2.0f, 3.0f),
new TreeData(-39.0f, 32.0f, 3.0f, 3.0f),
new TreeData(-44.0f, 37.0f, 3.0f, 4.0f),
new TreeData(-36.0f, 42.0f, 2.0f, 3.0f),
// band around x = -35..-26
new TreeData(-32.0f, -45.0f, 2.0f, 3.0f),
new TreeData(-30.0f, -42.0f, 2.0f, 4.0f),
new TreeData(-34.0f, -38.0f, 3.0f, 5.0f),
new TreeData(-33.0f, -35.0f, 3.0f, 4.0f),
new TreeData(-29.0f, -28.0f, 2.0f, 3.0f),
new TreeData(-26.0f, -25.0f, 3.0f, 5.0f),
new TreeData(-35.0f, -21.0f, 3.0f, 4.0f),
new TreeData(-31.0f, -17.0f, 3.0f, 3.0f),
new TreeData(-28.0f, -12.0f, 2.0f, 4.0f),
new TreeData(-29.0f, -7.0f, 3.0f, 3.0f),
new TreeData(-26.0f, -1.0f, 2.0f, 4.0f),
new TreeData(-32.0f, 6.0f, 2.0f, 3.0f),
new TreeData(-30.0f, 10.0f, 3.0f, 5.0f),
new TreeData(-33.0f, 14.0f, 2.0f, 4.0f),
new TreeData(-35.0f, 19.0f, 3.0f, 4.0f),
new TreeData(-28.0f, 22.0f, 2.0f, 3.0f),
new TreeData(-33.0f, 26.0f, 3.0f, 3.0f),
new TreeData(-29.0f, 31.0f, 3.0f, 4.0f),
new TreeData(-32.0f, 38.0f, 2.0f, 3.0f),
new TreeData(-27.0f, 41.0f, 3.0f, 4.0f),
new TreeData(-31.0f, 45.0f, 2.0f, 4.0f),
new TreeData(-28.0f, 48.0f, 3.0f, 5.0f),
// band around x = -25..-16
new TreeData(-25.0f, -48.0f, 2.0f, 3.0f),
new TreeData(-20.0f, -42.0f, 3.0f, 4.0f),
new TreeData(-22.0f, -39.0f, 2.0f, 3.0f),
new TreeData(-19.0f, -34.0f, 2.0f, 3.0f),
new TreeData(-23.0f, -30.0f, 3.0f, 4.0f),
new TreeData(-24.0f, -24.0f, 2.0f, 3.0f),
new TreeData(-16.0f, -21.0f, 2.0f, 3.0f),
new TreeData(-17.0f, -17.0f, 3.0f, 3.0f),
new TreeData(-25.0f, -13.0f, 2.0f, 4.0f),
new TreeData(-23.0f, -8.0f, 2.0f, 3.0f),
new TreeData(-17.0f, -2.0f, 3.0f, 3.0f),
new TreeData(-16.0f, 1.0f, 2.0f, 3.0f),
new TreeData(-19.0f, 4.0f, 3.0f, 3.0f),
new TreeData(-22.0f, 8.0f, 2.0f, 4.0f),
new TreeData(-21.0f, 14.0f, 2.0f, 3.0f),
new TreeData(-16.0f, 19.0f, 2.0f, 3.0f),
new TreeData(-23.0f, 24.0f, 3.0f, 3.0f),
new TreeData(-18.0f, 28.0f, 2.0f, 4.0f),
new TreeData(-24.0f, 31.0f, 2.0f, 3.0f),
new TreeData(-20.0f, 36.0f, 2.0f, 3.0f),
new TreeData(-22.0f, 41.0f, 3.0f, 3.0f),
new TreeData(-21.0f, 45.0f, 2.0f, 3.0f),
// band around x = -15..-6 (smaller trees)
new TreeData(-12.0f, -40.0f, 2.0f, 4.0f),
new TreeData(-11.0f, -35.0f, 3.0f, 3.0f),
new TreeData(-10.0f, -29.0f, 1.0f, 3.0f),
new TreeData(-9.0f, -26.0f, 2.0f, 2.0f),
new TreeData(-6.0f, -22.0f, 2.0f, 3.0f),
new TreeData(-15.0f, -15.0f, 1.0f, 3.0f),
new TreeData(-8.0f, -11.0f, 2.0f, 3.0f),
new TreeData(-14.0f, -6.0f, 2.0f, 4.0f),
new TreeData(-12.0f, 0.0f, 2.0f, 3.0f),
new TreeData(-7.0f, 4.0f, 2.0f, 2.0f),
new TreeData(-13.0f, 8.0f, 2.0f, 2.0f),
new TreeData(-9.0f, 13.0f, 1.0f, 3.0f),
new TreeData(-13.0f, 17.0f, 3.0f, 4.0f),
new TreeData(-6.0f, 23.0f, 2.0f, 3.0f),
new TreeData(-12.0f, 27.0f, 1.0f, 2.0f),
new TreeData(-8.0f, 32.0f, 2.0f, 3.0f),
new TreeData(-10.0f, 37.0f, 3.0f, 3.0f),
new TreeData(-11.0f, 42.0f, 2.0f, 2.0f),
// regular column of identical trees at x = 15
new TreeData(15.0f, 5.0f, 2.0f, 3.0f),
new TreeData(15.0f, 10.0f, 2.0f, 3.0f),
new TreeData(15.0f, 15.0f, 2.0f, 3.0f),
new TreeData(15.0f, 20.0f, 2.0f, 3.0f),
new TreeData(15.0f, 25.0f, 2.0f, 3.0f),
new TreeData(15.0f, 30.0f, 2.0f, 3.0f),
new TreeData(15.0f, 35.0f, 2.0f, 3.0f),
new TreeData(15.0f, 40.0f, 2.0f, 3.0f),
new TreeData(15.0f, 45.0f, 2.0f, 3.0f),
// regular column of identical trees at x = 25
new TreeData(25.0f, 5.0f, 2.0f, 3.0f),
new TreeData(25.0f, 10.0f, 2.0f, 3.0f),
new TreeData(25.0f, 15.0f, 2.0f, 3.0f),
new TreeData(25.0f, 20.0f, 2.0f, 3.0f),
new TreeData(25.0f, 25.0f, 2.0f, 3.0f),
new TreeData(25.0f, 30.0f, 2.0f, 3.0f),
new TreeData(25.0f, 35.0f, 2.0f, 3.0f),
new TreeData(25.0f, 40.0f, 2.0f, 3.0f),
new TreeData(25.0f, 45.0f, 2.0f, 3.0f)};
/**
 * Plain data holder for a single tree in the scene: its position on the
 * ground plane (x/z) and the heights of its trunk and its treetop cone.
 */
class TreeData {
    float xPos;
    float zPos;
    float trunkHeight;
    float coneHeight;

    TreeData(float x, float z, float trunk, float cone) {
        xPos = x;
        zPos = z;
        trunkHeight = trunk;
        coneHeight = cone;
    }
}
}
| |
/*******************************************************************************
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package com.google.cloud.dataflow.sdk.runners.worker;
import static com.google.cloud.dataflow.sdk.util.Structs.getBytes;
import com.google.api.services.dataflow.model.FlattenInstruction;
import com.google.api.services.dataflow.model.InstructionInput;
import com.google.api.services.dataflow.model.InstructionOutput;
import com.google.api.services.dataflow.model.MapTask;
import com.google.api.services.dataflow.model.ParDoInstruction;
import com.google.api.services.dataflow.model.ParallelInstruction;
import com.google.api.services.dataflow.model.PartialGroupByKeyInstruction;
import com.google.api.services.dataflow.model.ReadInstruction;
import com.google.api.services.dataflow.model.WriteInstruction;
import com.google.cloud.dataflow.sdk.coders.Coder;
import com.google.cloud.dataflow.sdk.coders.KvCoder;
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
import com.google.cloud.dataflow.sdk.transforms.Combine;
import com.google.cloud.dataflow.sdk.transforms.windowing.BoundedWindow;
import com.google.cloud.dataflow.sdk.util.AppliedCombineFn;
import com.google.cloud.dataflow.sdk.util.CloudObject;
import com.google.cloud.dataflow.sdk.util.CoderUtils;
import com.google.cloud.dataflow.sdk.util.ExecutionContext;
import com.google.cloud.dataflow.sdk.util.PropertyNames;
import com.google.cloud.dataflow.sdk.util.SerializableUtils;
import com.google.cloud.dataflow.sdk.util.Serializer;
import com.google.cloud.dataflow.sdk.util.WindowedValue;
import com.google.cloud.dataflow.sdk.util.WindowedValue.WindowedValueCoder;
import com.google.cloud.dataflow.sdk.util.common.CounterSet;
import com.google.cloud.dataflow.sdk.util.common.ElementByteSizeObservable;
import com.google.cloud.dataflow.sdk.util.common.ElementByteSizeObserver;
import com.google.cloud.dataflow.sdk.util.common.worker.ElementCounter;
import com.google.cloud.dataflow.sdk.util.common.worker.FlattenOperation;
import com.google.cloud.dataflow.sdk.util.common.worker.MapTaskExecutor;
import com.google.cloud.dataflow.sdk.util.common.worker.Operation;
import com.google.cloud.dataflow.sdk.util.common.worker.OutputReceiver;
import com.google.cloud.dataflow.sdk.util.common.worker.ParDoFn;
import com.google.cloud.dataflow.sdk.util.common.worker.ParDoOperation;
import com.google.cloud.dataflow.sdk.util.common.worker.PartialGroupByKeyOperation;
import com.google.cloud.dataflow.sdk.util.common.worker.PartialGroupByKeyOperation.GroupingKeyCreator;
import com.google.cloud.dataflow.sdk.util.common.worker.ReadOperation;
import com.google.cloud.dataflow.sdk.util.common.worker.Reader;
import com.google.cloud.dataflow.sdk.util.common.worker.ReceivingOperation;
import com.google.cloud.dataflow.sdk.util.common.worker.Sink;
import com.google.cloud.dataflow.sdk.util.common.worker.StateSampler;
import com.google.cloud.dataflow.sdk.util.common.worker.WriteOperation;
import com.google.cloud.dataflow.sdk.values.KV;
import org.joda.time.Instant;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Nullable;
/**
* Creates a MapTaskExecutor from a MapTask definition.
*/
public class MapTaskExecutorFactory {
/**
 * Creates a new MapTaskExecutor from the given MapTask definition.
 *
 * @param options pipeline options, forwarded to the reader/sink/fn factories
 * @param mapTask the service-issued description of this stage's instruction graph
 * @param context per-task execution context shared by all created operations
 * @throws Exception if any instruction cannot be translated into an Operation
 */
public static MapTaskExecutor create(
PipelineOptions options, MapTask mapTask, DataflowExecutionContext context) throws Exception {
List<Operation> operations = new ArrayList<>();
CounterSet counters = new CounterSet();
String counterPrefix = mapTask.getStageName() + "-";
StateSampler stateSampler = new StateSampler(counterPrefix, counters.getAddCounterMutator());
// Open-ended state.
stateSampler.setState("other");
// Instantiate operations for each instruction in the graph.
// Instructions refer to their producers by list index, so each new operation
// may attach only to operations created earlier in this loop (see attachInput).
for (ParallelInstruction instruction : mapTask.getInstructions()) {
operations.add(createOperation(options, instruction, context, operations, counterPrefix,
counters.getAddCounterMutator(), stateSampler));
}
return new MapTaskExecutor(operations, counters, stateSampler);
}
/**
 * Creates an Operation from the given ParallelInstruction definition.
 * Dispatches on whichever of the instruction's alternative payloads
 * (read / write / parDo / partialGroupByKey / flatten) is populated.
 */
static Operation createOperation(
PipelineOptions options,
ParallelInstruction instruction,
DataflowExecutionContext executionContext,
List<Operation> priorOperations,
String counterPrefix,
CounterSet.AddCounterMutator addCounterMutator,
StateSampler stateSampler)
throws Exception {
if (instruction.getRead() != null) {
return createReadOperation(options, instruction, executionContext, priorOperations,
counterPrefix, addCounterMutator, stateSampler);
} else if (instruction.getWrite() != null) {
return createWriteOperation(options, instruction, executionContext, priorOperations,
counterPrefix, addCounterMutator, stateSampler);
} else if (instruction.getParDo() != null) {
return createParDoOperation(options, instruction, executionContext, priorOperations,
counterPrefix, addCounterMutator, stateSampler);
} else if (instruction.getPartialGroupByKey() != null) {
return createPartialGroupByKeyOperation(options, instruction, executionContext,
priorOperations, counterPrefix, addCounterMutator, stateSampler);
} else if (instruction.getFlatten() != null) {
return createFlattenOperation(options, instruction, executionContext, priorOperations,
counterPrefix, addCounterMutator, stateSampler);
} else {
throw new Exception("Unexpected instruction: " + instruction);
}
}
// Builds a ReadOperation: a source operation with no input and exactly one output.
static ReadOperation createReadOperation(
PipelineOptions options,
ParallelInstruction instruction,
DataflowExecutionContext executionContext,
@SuppressWarnings("unused") List<Operation> priorOperations,
String counterPrefix,
CounterSet.AddCounterMutator addCounterMutator,
StateSampler stateSampler)
throws Exception {
ReadInstruction read = instruction.getRead();
Reader<?> reader = ReaderFactory.create(options, read.getSource(), executionContext);
// A read instruction must declare exactly one output (enforced below).
OutputReceiver[] receivers =
createOutputReceivers(instruction, counterPrefix, addCounterMutator, stateSampler, 1);
return new ReadOperation(instruction.getSystemName(), reader, receivers, counterPrefix,
addCounterMutator, stateSampler);
}
// Builds a WriteOperation: one input, zero outputs, writing to a Sink.
static WriteOperation createWriteOperation(PipelineOptions options,
ParallelInstruction instruction, ExecutionContext executionContext,
List<Operation> priorOperations, String counterPrefix,
CounterSet.AddCounterMutator addCounterMutator, StateSampler stateSampler) throws Exception {
WriteInstruction write = instruction.getWrite();
Sink<?> sink =
SinkFactory.create(options, write.getSink(), executionContext, addCounterMutator);
OutputReceiver[] receivers =
createOutputReceivers(instruction, counterPrefix, addCounterMutator, stateSampler, 0);
WriteOperation operation = new WriteOperation(instruction.getSystemName(), sink, receivers,
counterPrefix, addCounterMutator, stateSampler);
attachInput(operation, write.getInput(), priorOperations);
return operation;
}
// Instantiates user ParDo functions from their serialized cloud specs.
private static ParDoFnFactory parDoFnFactory = new ParDoFnFactory.DefaultFactory();
// Builds a ParDoOperation: one input, one output receiver per declared output.
static ParDoOperation createParDoOperation(
PipelineOptions options,
ParallelInstruction instruction,
DataflowExecutionContext executionContext,
List<Operation> priorOperations,
String counterPrefix,
CounterSet.AddCounterMutator addCounterMutator,
StateSampler stateSampler)
throws Exception {
ParDoInstruction parDo = instruction.getParDo();
ParDoFn fn = parDoFnFactory.create(
options,
CloudObject.fromSpec(parDo.getUserFn()),
instruction.getSystemName(),
instruction.getName(),
parDo.getSideInputs(),
parDo.getMultiOutputInfos(),
parDo.getNumOutputs(),
executionContext,
addCounterMutator,
stateSampler);
OutputReceiver[] receivers = createOutputReceivers(
instruction, counterPrefix, addCounterMutator, stateSampler, parDo.getNumOutputs());
ParDoOperation operation = new ParDoOperation(
instruction.getSystemName(), fn, receivers, counterPrefix, addCounterMutator, stateSampler);
attachInput(operation, parDo.getInput(), priorOperations);
return operation;
}
// Builds a PartialGroupByKeyOperation (combiner lifting): one input, one output.
// The input coder must be a WindowedValueCoder wrapping a KvCoder so that key
// and value coders can be recovered for grouping and size estimation.
static PartialGroupByKeyOperation createPartialGroupByKeyOperation(
@SuppressWarnings("unused") PipelineOptions options,
ParallelInstruction instruction,
@SuppressWarnings("unused") ExecutionContext executionContext,
List<Operation> priorOperations, String counterPrefix,
CounterSet.AddCounterMutator addCounterMutator, StateSampler stateSampler) throws Exception {
PartialGroupByKeyInstruction pgbk = instruction.getPartialGroupByKey();
Coder<?> windowedCoder = Serializer.deserialize(pgbk.getInputElementCodec(), Coder.class);
if (!(windowedCoder instanceof WindowedValueCoder)) {
throw new Exception(
"unexpected kind of input coder for PartialGroupByKeyOperation: " + windowedCoder);
}
Coder<?> elemCoder = ((WindowedValueCoder<?>) windowedCoder).getValueCoder();
if (!(elemCoder instanceof KvCoder)) {
throw new Exception(
"unexpected kind of input element coder for PartialGroupByKeyOperation: " + elemCoder);
}
KvCoder<?, ?> kvCoder = (KvCoder<?, ?>) elemCoder;
Coder<?> keyCoder = kvCoder.getKeyCoder();
Coder<?> valueCoder = kvCoder.getValueCoder();
OutputReceiver[] receivers =
createOutputReceivers(instruction, counterPrefix, addCounterMutator, stateSampler, 1);
// Null when the PGBK does plain (non-combining) grouping.
PartialGroupByKeyOperation.Combiner<?, ?, ?, ?> valueCombiner = createValueCombiner(pgbk);
PartialGroupByKeyOperation operation = new PartialGroupByKeyOperation(
instruction.getSystemName(),
new WindowingCoderGroupingKeyCreator<>(keyCoder),
new CoderSizeEstimator<>(WindowedValue.getValueOnlyCoder(keyCoder)),
new CoderSizeEstimator<>(valueCoder), 0.001 /*sizeEstimatorSampleRate*/, valueCombiner,
PairInfo.create(), receivers, counterPrefix, addCounterMutator, stateSampler);
attachInput(operation, pgbk.getInput(), priorOperations);
return operation;
}
// Deserializes the instruction's combine fn, if configured; returns null
// otherwise, which makes the PGBK group without combining.
@SuppressWarnings({"rawtypes", "unchecked"})
static ValueCombiner createValueCombiner(PartialGroupByKeyInstruction pgbk) throws Exception {
if (pgbk.getValueCombiningFn() == null) {
return null;
}
Object deserializedFn = SerializableUtils.deserializeFromByteArray(
getBytes(CloudObject.fromSpec(pgbk.getValueCombiningFn()), PropertyNames.SERIALIZED_FN),
"serialized combine fn");
return new ValueCombiner(((AppliedCombineFn) deserializedFn).getFn());
}
/**
 * Implements PGBKOp.Combiner via Combine.KeyedCombineFn.
 * The grouping key is windowed; the wrapped combine fn only ever sees the
 * raw key value, never the window/timestamp metadata.
 */
public static class ValueCombiner<K, InputT, AccumT, OutputT>
implements PartialGroupByKeyOperation.Combiner<WindowedValue<K>, InputT, AccumT, OutputT> {
private final Combine.KeyedCombineFn<K, InputT, AccumT, OutputT> combineFn;
private ValueCombiner(Combine.KeyedCombineFn<K, InputT, AccumT, OutputT> combineFn) {
this.combineFn = combineFn;
}
@Override
public AccumT createAccumulator(WindowedValue<K> windowedKey) {
return this.combineFn.createAccumulator(windowedKey.getValue());
}
@Override
public AccumT add(WindowedValue<K> windowedKey, AccumT accumulator, InputT value) {
return this.combineFn.addInput(windowedKey.getValue(), accumulator, value);
}
@Override
public AccumT merge(WindowedValue<K> windowedKey, Iterable<AccumT> accumulators) {
return this.combineFn.mergeAccumulators(windowedKey.getValue(), accumulators);
}
@Override
public OutputT extract(WindowedValue<K> windowedKey, AccumT accumulator) {
return this.combineFn.extractOutput(windowedKey.getValue(), accumulator);
}
}
/**
 * Implements PGBKOp.PairInfo via KVs.
 * Splits a windowed KV into a windowed key and a bare value, and re-wraps
 * grouped output with the key's windowing metadata.
 */
public static class PairInfo implements PartialGroupByKeyOperation.PairInfo {
private static PairInfo theInstance = new PairInfo();
public static PairInfo create() {
return theInstance;
}
private PairInfo() {}
@Override
public Object getKeyFromInputPair(Object pair) {
@SuppressWarnings("unchecked")
WindowedValue<KV<?, ?>> windowedKv = (WindowedValue<KV<?, ?>>) pair;
// Keep the window/timestamp metadata on the key side.
return windowedKv.withValue(windowedKv.getValue().getKey());
}
@Override
public Object getValueFromInputPair(Object pair) {
@SuppressWarnings("unchecked")
WindowedValue<KV<?, ?>> windowedKv = (WindowedValue<KV<?, ?>>) pair;
// The value side is bare; its windowing info travels with the key.
return windowedKv.getValue().getValue();
}
@Override
public Object makeOutputPair(Object key, Object values) {
WindowedValue<?> windowedKey = (WindowedValue<?>) key;
return windowedKey.withValue(KV.of(windowedKey.getValue(), values));
}
}
/**
 * Implements PGBKOp.GroupingKeyCreator via Coder.
 */
// TODO: Actually support window merging in the combiner table.
public static class WindowingCoderGroupingKeyCreator<K>
implements GroupingKeyCreator<WindowedValue<K>> {
private static final Instant ignored = BoundedWindow.TIMESTAMP_MIN_VALUE;
private final Coder<K> coder;
public WindowingCoderGroupingKeyCreator(Coder<K> coder) {
this.coder = coder;
}
@Override
public Object createGroupingKey(WindowedValue<K> key) throws Exception {
// Ignore timestamp for grouping purposes.
// The PGBK output will inherit the timestamp of one of its inputs.
return WindowedValue.of(
coder.structuralValue(key.getValue()),
ignored,
key.getWindows(),
key.getPane());
}
}
/**
 * Implements PGBKOp.SizeEstimator via Coder.
 * Estimates an element's size as the length of its full encoding.
 */
public static class CoderSizeEstimator<T>implements PartialGroupByKeyOperation.SizeEstimator<T> {
final Coder<T> coder;
public CoderSizeEstimator(Coder<T> coder) {
this.coder = coder;
}
@Override
public long estimateSize(T value) throws Exception {
return CoderUtils.encodeToByteArray(coder, value).length;
}
}
// Builds a FlattenOperation: any number of inputs, exactly one output.
static FlattenOperation createFlattenOperation(
@SuppressWarnings("unused") PipelineOptions options,
ParallelInstruction instruction,
@SuppressWarnings("unused") ExecutionContext executionContext,
List<Operation> priorOperations, String counterPrefix,
CounterSet.AddCounterMutator addCounterMutator, StateSampler stateSampler) throws Exception {
FlattenInstruction flatten = instruction.getFlatten();
OutputReceiver[] receivers =
createOutputReceivers(instruction, counterPrefix, addCounterMutator, stateSampler, 1);
FlattenOperation operation = new FlattenOperation(
instruction.getSystemName(), receivers, counterPrefix, addCounterMutator, stateSampler);
for (InstructionInput input : flatten.getInputs()) {
attachInput(operation, input, priorOperations);
}
return operation;
}
/**
 * Returns an array of OutputReceivers for the given
 * ParallelInstruction definition.
 * Fails with an AssertionError when the instruction does not declare exactly
 * {@code expectedNumOutputs} outputs.
 */
static OutputReceiver[] createOutputReceivers(ParallelInstruction instruction,
@SuppressWarnings("unused") String counterPrefix,
CounterSet.AddCounterMutator addCounterMutator,
@SuppressWarnings("unused") StateSampler stateSampler,
int expectedNumOutputs) throws Exception {
int numOutputs = 0;
if (instruction.getOutputs() != null) {
numOutputs = instruction.getOutputs().size();
}
if (numOutputs != expectedNumOutputs) {
throw new AssertionError("ParallelInstruction.Outputs has an unexpected length");
}
OutputReceiver[] receivers = new OutputReceiver[numOutputs];
for (int i = 0; i < numOutputs; i++) {
InstructionOutput cloudOutput = instruction.getOutputs().get(i);
receivers[i] = new OutputReceiver();
// Each receiver counts its elements and, via the output's coder, observes
// their encoded byte sizes.
@SuppressWarnings("unchecked")
ElementCounter outputCounter = new DataflowOutputCounter(
cloudOutput.getName(),
new ElementByteSizeObservableCoder<>(
Serializer.deserialize(cloudOutput.getCodec(), Coder.class)),
addCounterMutator);
receivers[i].addOutputCounter(outputCounter);
}
return receivers;
}
/**
 * Adapts a Coder to the ElementByteSizeObservable interface.
 */
public static class ElementByteSizeObservableCoder<T> implements ElementByteSizeObservable<T> {
final Coder<T> coder;
public ElementByteSizeObservableCoder(Coder<T> coder) {
this.coder = coder;
}
@Override
public boolean isRegisterByteSizeObserverCheap(T value) {
return coder.isRegisterByteSizeObserverCheap(value, Coder.Context.OUTER);
}
@Override
public void registerByteSizeObserver(T value, ElementByteSizeObserver observer)
throws Exception {
coder.registerByteSizeObserver(value, observer, Coder.Context.OUTER);
}
}
/**
 * Adds an input to the given Operation, coming from the given
 * producer instruction output.
 * When the input (or one of its fields) is unset, it defaults to producer
 * instruction 0, output 0.
 */
static void attachInput(ReceivingOperation operation, @Nullable InstructionInput input,
List<Operation> priorOperations) {
Integer producerInstructionIndex = 0;
Integer outputNum = 0;
if (input != null) {
if (input.getProducerInstructionIndex() != null) {
producerInstructionIndex = input.getProducerInstructionIndex();
}
if (input.getOutputNum() != null) {
outputNum = input.getOutputNum();
}
}
// Input id must refer to an operation that has already been seen.
Operation source = priorOperations.get(producerInstructionIndex);
operation.attachInput(source, outputNum);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import junit.framework.TestCase;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.IOException;
import java.util.Map;
import java.util.Properties;
/**
* Test case to run a MapReduce job.
* <p/>
* It runs a 2 node cluster Hadoop with a 2 node DFS.
* <p/>
* The JobConf to use must be obtained via the createJobConf() method.
* <p/>
* It creates a temporary directory -accessible via getTestRootDir()-
* for both input and output.
* <p/>
* The input directory is accessible via getInputDir() and the output
* directory via getOutputDir()
* <p/>
* The DFS filesystem is formatted before the testcase starts and after it ends.
*/
public abstract class ClusterMapReduceTestCase extends TestCase {
    private MiniDFSCluster dfsCluster = null;
    private MiniMRCluster mrCluster = null;

    /**
     * Brings up the Hadoop mini clusters (DFS + MapReduce) before each test.
     *
     * @throws Exception if the clusters cannot be started
     */
    protected void setUp() throws Exception {
        super.setUp();
        startCluster(true, null);
    }

    /**
     * Starts the cluster within a testcase, using a single mapred-local-dir
     * per TaskTracker.
     * <p/>
     * The cluster is already running when a test method begins; this is only
     * needed when a test shuts the cluster down and has to restart it.
     * <p/>
     * If the cluster is already running this method does nothing.
     *
     * @param reformatDFS whether DFS should be reformatted
     * @param props configuration properties to inject into the mini cluster
     * @throws Exception if the cluster could not be started
     */
    protected synchronized void startCluster(boolean reformatDFS,
                                             Properties props) throws Exception {
        startCluster(reformatDFS, props, 1);
    }

    /**
     * Starts the cluster within a testcase, using the given number of
     * mapred-local-dirs per TaskTracker.
     * <p/>
     * The cluster is already running when a test method begins; this is only
     * needed when a test shuts the cluster down and has to restart it.
     * <p/>
     * If the cluster is already running this method does nothing.
     *
     * @param reformatDFS whether DFS should be reformatted
     * @param props configuration properties to inject into the mini cluster
     * @param numDir number of mapred-local-dirs per TaskTracker
     * @throws Exception if the cluster could not be started
     */
    protected synchronized void startCluster(boolean reformatDFS,
                                             Properties props, int numDir) throws Exception {
        if (dfsCluster != null) {
            return; // already running
        }
        JobConf conf = new JobConf();
        if (props != null) {
            for (Map.Entry prop : props.entrySet()) {
                conf.set((String) prop.getKey(), (String) prop.getValue());
            }
        }
        dfsCluster = new MiniDFSCluster(conf, 2, reformatDFS, null);
        // Make the injected properties visible to job confs created later on.
        ConfigurableMiniMRCluster.setConfiguration(props);
        //noinspection deprecation
        mrCluster = new ConfigurableMiniMRCluster(2, getFileSystem().getName(),
                                                  numDir, conf);
    }

    /**
     * MiniMRCluster variant whose createJobConf() output carries the extra
     * properties previously injected via setConfiguration(Properties).
     */
    private static class ConfigurableMiniMRCluster extends MiniMRCluster {
        private static Properties config;

        public static void setConfiguration(Properties props) {
            config = props;
        }

        public ConfigurableMiniMRCluster(int numTaskTrackers, String namenode,
                                         int numDir, JobConf conf)
            throws Exception {
            super(0, 0, numTaskTrackers, namenode, numDir, null, null, null, conf);
        }

        public JobConf createJobConf() {
            JobConf conf = super.createJobConf();
            if (config != null) {
                for (Map.Entry prop : config.entrySet()) {
                    conf.set((String) prop.getKey(), (String) prop.getValue());
                }
            }
            return conf;
        }
    }

    /**
     * Stops the cluster within a testcase.
     * <p/>
     * Useful when a test needs to exercise cluster shutdown.
     * <p/>
     * If the cluster is already stopped this method does nothing.
     *
     * @throws Exception if the cluster could not be stopped
     */
    protected void stopCluster() throws Exception {
        if (mrCluster != null) {
            mrCluster.shutdown();
            mrCluster = null;
        }
        if (dfsCluster != null) {
            dfsCluster.shutdown();
            dfsCluster = null;
        }
    }

    /**
     * Destroys the Hadoop mini clusters after a test case has run.
     *
     * @throws Exception
     */
    protected void tearDown() throws Exception {
        stopCluster();
        super.tearDown();
    }

    /**
     * Returns a preconfigured filesystem instance that test cases should use
     * for all reads and writes.
     *
     * @return the filesystem used by Hadoop
     * @throws IOException
     */
    protected FileSystem getFileSystem() throws IOException {
        return dfsCluster.getFileSystem();
    }

    /** Returns the running MapReduce mini cluster. */
    protected MiniMRCluster getMRCluster() {
        return mrCluster;
    }

    /**
     * Returns the path to the root directory for the testcase.
     *
     * @return path to the root directory for the testcase
     */
    protected Path getTestRootDir() {
        // Parent of a one-segment relative path, i.e. the working directory.
        return new Path("x").getParent();
    }

    /**
     * Returns a path to the input directory for the testcase.
     *
     * @return path to the input directory for the testcase
     */
    protected Path getInputDir() {
        return new Path("input");
    }

    /**
     * Returns a path to the output directory for the testcase.
     *
     * @return path to the output directory for the testcase
     */
    protected Path getOutputDir() {
        return new Path("output");
    }

    /**
     * Returns a job configuration preconfigured to run against the Hadoop
     * instance managed by this testcase.
     *
     * @return configuration that works on the testcase Hadoop instance
     */
    protected JobConf createJobConf() {
        return mrCluster.createJobConf();
    }
}
| |
/*
* Copyright 2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openehealth.ipf.commons.ihe.xds.core.validate.requests;
import org.openehealth.ipf.commons.core.modules.api.Validator;
import org.openehealth.ipf.commons.ihe.core.InteractionId;
import org.openehealth.ipf.commons.ihe.core.IpfInteractionId;
import org.openehealth.ipf.commons.ihe.xds.core.ebxml.EbXMLAdhocQueryRequest;
import org.openehealth.ipf.commons.ihe.xds.core.requests.query.QueryReturnType;
import org.openehealth.ipf.commons.ihe.xds.core.requests.query.QueryType;
import org.openehealth.ipf.commons.ihe.xds.core.transform.requests.QueryParameter;
import org.openehealth.ipf.commons.ihe.xds.core.validate.*;
import org.openehealth.ipf.commons.ihe.xds.core.validate.query.*;
import java.util.*;
import static org.apache.commons.lang3.Validate.notNull;
import static org.openehealth.ipf.commons.ihe.core.IpfInteractionId.*;
import static org.openehealth.ipf.commons.ihe.xds.core.requests.query.QueryType.*;
import static org.openehealth.ipf.commons.ihe.xds.core.transform.requests.QueryParameter.*;
import static org.openehealth.ipf.commons.ihe.xds.core.validate.ValidationMessage.*;
import static org.openehealth.ipf.commons.ihe.xds.core.validate.ValidatorAssertions.metaDataAssert;
/**
* Validates an {@link EbXMLAdhocQueryRequest}.
* @author Jens Riemschneider
*/
public class AdhocQueryRequestValidator implements Validator<EbXMLAdhocQueryRequest, ValidationProfile> {
// Shared, stateless validators reused across all query-parameter checks.
// Validates CX-formatted identifier values (e.g. patient IDs).
private static final CXValidator cxValidator = new CXValidator();
// Validates timestamp values used in time-range parameters.
private static final TimeValidator timeValidator = new TimeValidator();
// Accepts anything; used where a parameter's value needs no format check.
private static final NopValidator nopValidator = new NopValidator();
/**
 * Registers, for the given query type, the parameters whose slots may
 * legally appear more than once in a request.
 */
private static void addAllowedMultipleSlots(QueryType queryType, QueryParameter... parameters) {
    Set<String> slotNames = new HashSet<String>(Math.max(parameters.length, 1));
    for (int i = 0; i < parameters.length; ++i) {
        slotNames.add(parameters[i].getSlotName());
    }
    ALLOWED_MULTIPLE_SLOTS.put(queryType, slotNames);
}
/**
 * For each query type: the set of slot names that may occur more than once
 * in a request. Populated once in the static initializer below and never
 * modified afterwards.
 */
private static final Map<QueryType, Set<String>> ALLOWED_MULTIPLE_SLOTS;
static {
ALLOWED_MULTIPLE_SLOTS = new HashMap<QueryType, Set<String>>();
// Code-list and status parameters may repeat for the Find* queries.
addAllowedMultipleSlots(FIND_DOCUMENTS,
DOC_ENTRY_CLASS_CODE,
DOC_ENTRY_TYPE_CODE,
DOC_ENTRY_PRACTICE_SETTING_CODE,
DOC_ENTRY_HEALTHCARE_FACILITY_TYPE_CODE,
DOC_ENTRY_EVENT_CODE,
DOC_ENTRY_CONFIDENTIALITY_CODE,
DOC_ENTRY_AUTHOR_PERSON,
DOC_ENTRY_FORMAT_CODE,
DOC_ENTRY_STATUS);
// The multi-patient variant additionally allows a repeated patient ID slot.
addAllowedMultipleSlots(FIND_DOCUMENTS_MPQ,
DOC_ENTRY_PATIENT_ID,
DOC_ENTRY_CLASS_CODE,
DOC_ENTRY_TYPE_CODE,
DOC_ENTRY_PRACTICE_SETTING_CODE,
DOC_ENTRY_HEALTHCARE_FACILITY_TYPE_CODE,
DOC_ENTRY_EVENT_CODE,
DOC_ENTRY_CONFIDENTIALITY_CODE,
DOC_ENTRY_AUTHOR_PERSON,
DOC_ENTRY_FORMAT_CODE,
DOC_ENTRY_STATUS);
addAllowedMultipleSlots(FIND_SUBMISSION_SETS,
SUBMISSION_SET_SOURCE_ID,
SUBMISSION_SET_CONTENT_TYPE_CODE,
SUBMISSION_SET_STATUS);
addAllowedMultipleSlots(FIND_FOLDERS,
FOLDER_CODES,
FOLDER_STATUS);
addAllowedMultipleSlots(FIND_FOLDERS_MPQ,
FOLDER_PATIENT_ID,
FOLDER_CODES,
FOLDER_STATUS);
addAllowedMultipleSlots(GET_ALL,
DOC_ENTRY_STATUS,
SUBMISSION_SET_STATUS,
FOLDER_STATUS,
DOC_ENTRY_FORMAT_CODE,
DOC_ENTRY_CONFIDENTIALITY_CODE);
// The Get* queries accept repeated UUID / unique-ID list slots.
addAllowedMultipleSlots(GET_DOCUMENTS,
DOC_ENTRY_UUID,
DOC_ENTRY_UNIQUE_ID);
addAllowedMultipleSlots(GET_FOLDERS,
FOLDER_UUID,
FOLDER_UNIQUE_ID);
addAllowedMultipleSlots(GET_ASSOCIATIONS,
UUID);
addAllowedMultipleSlots(GET_DOCUMENTS_AND_ASSOCIATIONS,
DOC_ENTRY_UUID,
DOC_ENTRY_UNIQUE_ID);
addAllowedMultipleSlots(GET_SUBMISSION_SETS,
UUID);
addAllowedMultipleSlots(GET_SUBMISSION_SET_AND_CONTENTS,
DOC_ENTRY_FORMAT_CODE,
DOC_ENTRY_CONFIDENTIALITY_CODE);
addAllowedMultipleSlots(GET_FOLDER_AND_CONTENTS,
DOC_ENTRY_FORMAT_CODE,
DOC_ENTRY_CONFIDENTIALITY_CODE);
// GetFoldersForDocument allows no repeated slots at all.
addAllowedMultipleSlots(GET_FOLDERS_FOR_DOCUMENT
/* empty list */);
addAllowedMultipleSlots(GET_RELATED_DOCUMENTS,
ASSOCIATION_TYPE);
addAllowedMultipleSlots(FETCH,
DOC_ENTRY_CLASS_CODE,
DOC_ENTRY_TYPE_CODE,
DOC_ENTRY_PRACTICE_SETTING_CODE,
DOC_ENTRY_HEALTHCARE_FACILITY_TYPE_CODE,
DOC_ENTRY_EVENT_CODE,
DOC_ENTRY_CONFIDENTIALITY_CODE,
DOC_ENTRY_AUTHOR_PERSON,
DOC_ENTRY_FORMAT_CODE);
}
/**
 * Maps each group of transactions (interaction IDs) to the query types they
 * may carry. NOTE(review): presumably consulted while validating that an
 * incoming query type is legal for the transaction it arrived on -- confirm
 * against the validate() implementation.
 */
private static final Map<List<InteractionId>, List<QueryType>> ALLOWED_QUERY_TYPES;
static {
ALLOWED_QUERY_TYPES = new HashMap<List<InteractionId>, List<QueryType>>(3);
// ITI-16 supports only the SQL query type.
ALLOWED_QUERY_TYPES.put(
Collections.<InteractionId> singletonList(ITI_16),
Collections.singletonList(SQL));
// ITI-18 and ITI-38 share the full stored-query set.
ALLOWED_QUERY_TYPES.put(
Arrays.<InteractionId> asList(ITI_18, ITI_38),
Arrays.asList(
FIND_DOCUMENTS,
FIND_SUBMISSION_SETS,
FIND_FOLDERS,
GET_ALL,
GET_DOCUMENTS,
GET_FOLDERS,
GET_ASSOCIATIONS,
GET_DOCUMENTS_AND_ASSOCIATIONS,
GET_SUBMISSION_SETS,
GET_SUBMISSION_SET_AND_CONTENTS,
GET_FOLDER_AND_CONTENTS,
GET_FOLDERS_FOR_DOCUMENT,
GET_RELATED_DOCUMENTS
));
// ITI-51 carries only the multi-patient query variants.
ALLOWED_QUERY_TYPES.put(
Collections.<InteractionId> singletonList(ITI_51),
Arrays.asList(
FIND_DOCUMENTS_MPQ,
FIND_FOLDERS_MPQ
));
// ITI-63 supports only the Fetch query.
ALLOWED_QUERY_TYPES.put(
Collections.<InteractionId> singletonList(ITI_63),
Collections.singletonList(FETCH));
}
/**
 * Returns the query parameter validations to be applied for the given stored
 * query type.
 *
 * @param queryType
 *      the stored query type; must be one of the types handled below
 *      ({@code SQL} queries carry no slot parameters and never reach here,
 *      see {@code validate}).
 * @param profile
 *      the validation profile; for the cross-community profiles (XCA, XCF)
 *      the homeCommunityId parameter becomes mandatory on the GetXxx queries.
 * @return array of validations to run in order; never <code>null</code>.
 * @throws IllegalStateException
 *      if no validator set is defined for the query type — indicates a
 *      programming error, e.g. a newly added {@link QueryType} constant.
 */
private QueryParameterValidation[] getValidators(QueryType queryType, ValidationProfile profile) {
    // Cross-community profiles require an explicit homeCommunityId.
    boolean requireHomeCommunityId =
            (profile.getProfile() == ValidationProfile.InteractionProfile.XCA) ||
            (profile.getProfile() == ValidationProfile.InteractionProfile.XCF);

    switch (queryType) {
        case FETCH:
            return new QueryParameterValidation[] {
                    new StringValidation(DOC_ENTRY_PATIENT_ID, cxValidator, false),
                    new CodeValidation(DOC_ENTRY_CLASS_CODE, false),
                    new CodeValidation(DOC_ENTRY_TYPE_CODE),
                    new CodeValidation(DOC_ENTRY_PRACTICE_SETTING_CODE),
                    new CodeValidation(DOC_ENTRY_HEALTHCARE_FACILITY_TYPE_CODE),
                    new CodeValidation(DOC_ENTRY_FORMAT_CODE),
                    new NumberValidation(DOC_ENTRY_CREATION_TIME_FROM, timeValidator),
                    new NumberValidation(DOC_ENTRY_CREATION_TIME_TO, timeValidator),
                    new NumberValidation(DOC_ENTRY_SERVICE_START_TIME_FROM, timeValidator),
                    new NumberValidation(DOC_ENTRY_SERVICE_START_TIME_TO, timeValidator),
                    new NumberValidation(DOC_ENTRY_SERVICE_STOP_TIME_FROM, timeValidator),
                    new NumberValidation(DOC_ENTRY_SERVICE_STOP_TIME_TO, timeValidator),
                    new QueryListCodeValidation(DOC_ENTRY_EVENT_CODE, DOC_ENTRY_EVENT_CODE_SCHEME),
                    new QueryListCodeValidation(DOC_ENTRY_CONFIDENTIALITY_CODE, DOC_ENTRY_CONFIDENTIALITY_CODE_SCHEME),
                    new StringListValidation(DOC_ENTRY_AUTHOR_PERSON, nopValidator),
                    // FETCH always requires a homeCommunityId, regardless of profile.
                    new HomeCommunityIdValidation(true),
            };
        case FIND_DOCUMENTS:
        case FIND_DOCUMENTS_MPQ:
            return new QueryParameterValidation[] {
                    // PatientId MUST BE supplied in single patient query.
                    // PatientId (list) MAY BE supplied in multi patient query.
                    // The validators for the two cases are otherwise identical.
                    queryType.equals(FIND_DOCUMENTS)
                        ? new StringValidation(DOC_ENTRY_PATIENT_ID, cxValidator, false)
                        : new StringListValidation(DOC_ENTRY_PATIENT_ID, cxValidator),
                    new CodeValidation(DOC_ENTRY_CLASS_CODE),
                    new CodeValidation(DOC_ENTRY_TYPE_CODE),
                    new CodeValidation(DOC_ENTRY_PRACTICE_SETTING_CODE),
                    new CodeValidation(DOC_ENTRY_HEALTHCARE_FACILITY_TYPE_CODE),
                    new CodeValidation(DOC_ENTRY_FORMAT_CODE),
                    new NumberValidation(DOC_ENTRY_CREATION_TIME_FROM, timeValidator),
                    new NumberValidation(DOC_ENTRY_CREATION_TIME_TO, timeValidator),
                    new NumberValidation(DOC_ENTRY_SERVICE_START_TIME_FROM, timeValidator),
                    new NumberValidation(DOC_ENTRY_SERVICE_START_TIME_TO, timeValidator),
                    new NumberValidation(DOC_ENTRY_SERVICE_STOP_TIME_FROM, timeValidator),
                    new NumberValidation(DOC_ENTRY_SERVICE_STOP_TIME_TO, timeValidator),
                    new QueryListCodeValidation(DOC_ENTRY_EVENT_CODE, DOC_ENTRY_EVENT_CODE_SCHEME),
                    new QueryListCodeValidation(DOC_ENTRY_CONFIDENTIALITY_CODE, DOC_ENTRY_CONFIDENTIALITY_CODE_SCHEME),
                    new StringListValidation(DOC_ENTRY_AUTHOR_PERSON, nopValidator),
                    new StatusValidation(DOC_ENTRY_STATUS),
                    new DocumentEntryTypeValidation(),
            };
        case FIND_SUBMISSION_SETS:
            return new QueryParameterValidation[] {
                    new StringValidation(SUBMISSION_SET_PATIENT_ID, cxValidator, false),
                    // Excluded to avoid validation errors for xdstest requests
                    // new StringListValidation(SUBMISSION_SET_SOURCE_ID, oidValidator),
                    new NumberValidation(SUBMISSION_SET_SUBMISSION_TIME_FROM, timeValidator),
                    new NumberValidation(SUBMISSION_SET_SUBMISSION_TIME_TO, timeValidator),
                    new StringValidation(SUBMISSION_SET_AUTHOR_PERSON, nopValidator, true),
                    new CodeValidation(SUBMISSION_SET_CONTENT_TYPE_CODE),
                    new StatusValidation(SUBMISSION_SET_STATUS),
            };
        case FIND_FOLDERS:
        case FIND_FOLDERS_MPQ:
            return new QueryParameterValidation[] {
                    // PatientId MUST BE supplied in single patient query.
                    // PatientId (list) MAY BE supplied in multi patient query.
                    // The validators for the two cases are otherwise identical.
                    queryType.equals(FIND_FOLDERS)
                        ? new StringValidation(FOLDER_PATIENT_ID, cxValidator, false)
                        : new StringListValidation(FOLDER_PATIENT_ID, cxValidator),
                    new NumberValidation(FOLDER_LAST_UPDATE_TIME_FROM, timeValidator),
                    new NumberValidation(FOLDER_LAST_UPDATE_TIME_TO, timeValidator),
                    new QueryListCodeValidation(FOLDER_CODES, FOLDER_CODES_SCHEME),
                    new StatusValidation(FOLDER_STATUS),
            };
        case GET_ALL:
            return new QueryParameterValidation[] {
                    new StringValidation(PATIENT_ID, cxValidator, false),
                    new StatusValidation(DOC_ENTRY_STATUS),
                    new StatusValidation(SUBMISSION_SET_STATUS),
                    new StatusValidation(FOLDER_STATUS),
                    new QueryListCodeValidation(DOC_ENTRY_FORMAT_CODE, DOC_ENTRY_FORMAT_CODE_SCHEME),
                    new DocumentEntryTypeValidation(),
            };
        case GET_DOCUMENTS:
        case GET_DOCUMENTS_AND_ASSOCIATIONS:
            return new QueryParameterValidation[] {
                    new HomeCommunityIdValidation(requireHomeCommunityId),
                    // Exactly one of UUID list / unique-ID list must be present.
                    new ChoiceValidation(DOC_ENTRY_UUID, DOC_ENTRY_UNIQUE_ID),
                    new StringListValidation(DOC_ENTRY_UUID, nopValidator),
                    new StringListValidation(DOC_ENTRY_UNIQUE_ID, nopValidator),
            };
        case GET_FOLDERS_FOR_DOCUMENT:
            return new QueryParameterValidation[] {
                    new HomeCommunityIdValidation(requireHomeCommunityId),
                    new ChoiceValidation(DOC_ENTRY_UUID, DOC_ENTRY_UNIQUE_ID),
                    new StringValidation(DOC_ENTRY_UUID, nopValidator, true),
                    new StringValidation(DOC_ENTRY_UNIQUE_ID, nopValidator, true),
            };
        case GET_FOLDERS:
            return new QueryParameterValidation[] {
                    new HomeCommunityIdValidation(requireHomeCommunityId),
                    new ChoiceValidation(FOLDER_UUID, FOLDER_UNIQUE_ID),
                    new StringListValidation(FOLDER_UUID, nopValidator),
                    new StringListValidation(FOLDER_UNIQUE_ID, nopValidator),
            };
        case GET_ASSOCIATIONS:
        case GET_SUBMISSION_SETS:
            return new QueryParameterValidation[] {
                    new HomeCommunityIdValidation(requireHomeCommunityId),
                    new StringListValidation(UUID, nopValidator),
            };
        case GET_SUBMISSION_SET_AND_CONTENTS:
            return new QueryParameterValidation[] {
                    new HomeCommunityIdValidation(requireHomeCommunityId),
                    new ChoiceValidation(SUBMISSION_SET_UUID, SUBMISSION_SET_UNIQUE_ID),
                    new StringValidation(SUBMISSION_SET_UUID, nopValidator, true),
                    new StringValidation(SUBMISSION_SET_UNIQUE_ID, nopValidator, true),
                    new QueryListCodeValidation(DOC_ENTRY_CONFIDENTIALITY_CODE, DOC_ENTRY_CONFIDENTIALITY_CODE_SCHEME),
                    new QueryListCodeValidation(DOC_ENTRY_FORMAT_CODE, DOC_ENTRY_FORMAT_CODE_SCHEME),
                    new DocumentEntryTypeValidation(),
            };
        case GET_FOLDER_AND_CONTENTS:
            return new QueryParameterValidation[] {
                    new HomeCommunityIdValidation(requireHomeCommunityId),
                    new ChoiceValidation(FOLDER_UUID, FOLDER_UNIQUE_ID),
                    new StringValidation(FOLDER_UUID, nopValidator, true),
                    new StringValidation(FOLDER_UNIQUE_ID, nopValidator, true),
                    new QueryListCodeValidation(DOC_ENTRY_CONFIDENTIALITY_CODE, DOC_ENTRY_CONFIDENTIALITY_CODE_SCHEME),
                    new QueryListCodeValidation(DOC_ENTRY_FORMAT_CODE, DOC_ENTRY_FORMAT_CODE_SCHEME),
                    new DocumentEntryTypeValidation(),
            };
        case GET_RELATED_DOCUMENTS:
            return new QueryParameterValidation[] {
                    new HomeCommunityIdValidation(requireHomeCommunityId),
                    new ChoiceValidation(DOC_ENTRY_UUID, DOC_ENTRY_UNIQUE_ID),
                    new StringValidation(DOC_ENTRY_UUID, nopValidator, true),
                    new StringValidation(DOC_ENTRY_UNIQUE_ID, nopValidator, true),
                    new AssociationValidation(ASSOCIATION_TYPE),
                    new DocumentEntryTypeValidation(),
            };
    }
    // Every query type accepted by validate() is covered above.  Previously
    // this fell through to "return null", which would have surfaced as an
    // obscure NullPointerException in the caller's for-each loop; fail fast
    // with a descriptive message instead.
    throw new IllegalStateException("no validators defined for query type: " + queryType);
}
/**
 * Validates the given ad-hoc query request against the given profile.
 * Checks, in order: the return type, that the query ID maps to a known
 * {@code QueryType}, that the query type is allowed for the profile's
 * transaction, and finally the query's slot parameters (or, for SQL
 * queries, the presence of the SQL text).
 *
 * @param request the ebXML query request; must not be <code>null</code>.
 * @param profile determines the transaction and therefore the allowed
 *                query and return types.
 */
@Override
public void validate(EbXMLAdhocQueryRequest request, ValidationProfile profile) {
    notNull(request, "request cannot be null");

    // ITI-63 requires LeafClassWithRepositoryItem; all other transactions
    // accept LeafClass or ObjectRef.
    if (profile.getInteractionId() == IpfInteractionId.ITI_63) {
        metaDataAssert(QueryReturnType.LEAF_CLASS_WITH_REPOSITORY_ITEM.getCode().equals(request.getReturnType()),
                UNKNOWN_RETURN_TYPE, request.getReturnType());
    } else {
        metaDataAssert(QueryReturnType.LEAF_CLASS.getCode().equals(request.getReturnType())
                || QueryReturnType.OBJECT_REF.getCode().equals(request.getReturnType()),
                UNKNOWN_RETURN_TYPE, request.getReturnType());
    }

    QueryType queryType = QueryType.valueOfId(request.getId());
    metaDataAssert(queryType != null, UNKNOWN_QUERY_TYPE, request.getId());

    // Look up the transaction group containing this profile's interaction ID
    // and check the query type is allowed there.  "found" tracks whether the
    // interaction ID belongs to any known group at all.
    boolean found = false;
    for (Map.Entry<List<InteractionId>, List<QueryType>> entry : ALLOWED_QUERY_TYPES.entrySet()) {
        if (entry.getKey().contains(profile.getInteractionId())) {
            metaDataAssert(entry.getValue().contains(queryType), UNSUPPORTED_QUERY_TYPE, queryType);
            found = true;
            break;
        }
    }
    metaDataAssert(found, UNKNOWN_QUERY_TYPE, queryType);

    if (queryType == QueryType.SQL) {
        // SQL queries have no slot parameters, only the query text.
        metaDataAssert(request.getSql() != null, MISSING_SQL_QUERY_TEXT);
    } else {
        // Generic slot checks first (length limits, uniqueness except for
        // the slots explicitly allowed to repeat), then per-parameter checks.
        new SlotLengthAndNameUniquenessValidator().validateSlots(
                request.getSlots(),
                ALLOWED_MULTIPLE_SLOTS.get(queryType));
        for (QueryParameterValidation validation : getValidators(queryType, profile)) {
            validation.validate(request);
        }
        // FindDocumentsMPQ additionally requires at least one of three
        // narrowing parameters to bound the multi-patient result set.
        if (queryType == FIND_DOCUMENTS_MPQ) {
            metaDataAssert(
                (! request.getSlotValues(DOC_ENTRY_CLASS_CODE.getSlotName()).isEmpty()) ||
                (! request.getSlotValues(DOC_ENTRY_EVENT_CODE.getSlotName()).isEmpty()) ||
                (! request.getSlotValues(DOC_ENTRY_HEALTHCARE_FACILITY_TYPE_CODE.getSlotName()).isEmpty()),
                ValidationMessage.MISSING_REQUIRED_QUERY_PARAMETER,
                "at least one of $XDSDocumentEntryClassCode, $XDSDocumentEntryEventCodeList, $XDSDocumentEntryHealthcareFacilityTypeCode");
        }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.math;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.io.StreamCorruptedException;
import org.apache.harmony.math.internal.nls.Messages;
/**
* @author Intel Middleware Product Division
* @author Instituto Tecnologico de Cordoba
*/
/**
 * Immutable settings object governing {@code BigDecimal} arithmetic: the
 * number of significant digits to which a result is computed
 * ({@code precision}) and the {@link RoundingMode} applied when a result
 * must be shortened to that precision.  A precision of {@code 0} means
 * unlimited precision.
 *
 * @author Intel Middleware Product Division
 * @author Instituto Tecnologico de Cordoba
 */
public class MathContext implements Serializable {

    /* Fields */

    /** Context with a precision of 34 digits and HALF_EVEN rounding. */
    public static final MathContext DECIMAL128 = new MathContext(34,
            RoundingMode.HALF_EVEN);

    /** Context with a precision of 7 digits and HALF_EVEN rounding. */
    public static final MathContext DECIMAL32 = new MathContext(7,
            RoundingMode.HALF_EVEN);

    /** Context with a precision of 16 digits and HALF_EVEN rounding. */
    public static final MathContext DECIMAL64 = new MathContext(16,
            RoundingMode.HALF_EVEN);

    /** Context with unlimited precision (0 digits) and HALF_UP rounding. */
    public static final MathContext UNLIMITED = new MathContext(0,
            RoundingMode.HALF_UP);

    /** Fixed UID required by the serialization contract of this class. */
    private static final long serialVersionUID = 5579720004786848255L;

    /**
     * The number of digits to be used for an operation;
     * results are rounded to this precision.
     */
    private int precision;

    /**
     * A {@code RoundingMode} object which specifies
     * the algorithm to be used for rounding.
     */
    private RoundingMode roundingMode;

    /**
     * An array of {@code char} containing:
     * {@code 'p','r','e','c','i','s','i','o','n','='}.
     * It's used to improve the methods related to {@code String} conversion.
     * @see #MathContext(String)
     * @see #toString()
     */
    private final static char[] chPrecision = { 'p', 'r', 'e', 'c', 'i', 's',
            'i', 'o', 'n', '=' };

    /**
     * An array of {@code char} containing:
     * {@code 'r','o','u','n','d','i','n','g','M','o','d','e','='}.
     * It's used to improve the methods related to {@code String} conversion.
     * @see #MathContext(String)
     * @see #toString()
     */
    private final static char[] chRoundingMode = { 'r', 'o', 'u', 'n', 'd',
            'i', 'n', 'g', 'M', 'o', 'd', 'e', '=' };

    /* Constructors */

    /**
     * Creates a context with the given precision and HALF_UP rounding.
     *
     * @param setPrecision the precision; must be {@code >= 0}
     * @throws IllegalArgumentException if {@code setPrecision < 0}
     */
    public MathContext(int setPrecision) {
        this(setPrecision, RoundingMode.HALF_UP);
    }

    /**
     * Creates a context with the given precision and rounding mode.
     *
     * @param setPrecision the precision; must be {@code >= 0}
     * @param setRoundingMode the rounding mode; must not be {@code null}
     * @throws IllegalArgumentException if {@code setPrecision < 0}
     * @throws NullPointerException if {@code setRoundingMode} is {@code null}
     */
    public MathContext(int setPrecision, RoundingMode setRoundingMode) {
        if (setPrecision < 0) {
            // math.0C=Digits < 0
            throw new IllegalArgumentException(Messages.getString("math.0C")); //$NON-NLS-1$
        }
        if (setRoundingMode == null) {
            // math.0D=null RoundingMode
            throw new NullPointerException(Messages.getString("math.0D")); //$NON-NLS-1$
        }
        precision = setPrecision;
        roundingMode = setRoundingMode;
    }

    /**
     * Parses a context from the format produced by {@link #toString()},
     * i.e. {@code "precision=<digits> roundingMode=<mode name>"}.
     *
     * @param val the string to parse
     * @throws IllegalArgumentException if {@code val} does not match the
     *         expected format, the precision overflows an {@code int}, or
     *         the rounding mode name is unknown
     */
    public MathContext(String val) {
        char[] charVal = val.toCharArray();
        int i; // Index of charVal
        int j; // Index of chRoundingMode
        int digit; // It will contain the digit parsed
        // Quick sanity check against the minimal and maximal lengths a
        // well-formed representation can take.
        if ((charVal.length < 27) || (charVal.length > 45)) {
            // math.0E=bad string format
            throw new IllegalArgumentException(Messages.getString("math.0E")); //$NON-NLS-1$
        }
        // Parsing the literal "precision=" prefix
        for (i = 0; (i < chPrecision.length) && (charVal[i] == chPrecision[i]); i++) {
            ;
        }
        if (i < chPrecision.length) {
            // math.0E=bad string format
            throw new IllegalArgumentException(Messages.getString("math.0E")); //$NON-NLS-1$
        }
        // Parsing the value for "precision="... (at least one digit required)
        digit = Character.digit(charVal[i], 10);
        if (digit == -1) {
            // math.0E=bad string format
            throw new IllegalArgumentException(Messages.getString("math.0E")); //$NON-NLS-1$
        }
        this.precision = this.precision * 10 + digit;
        i++;
        do {
            digit = Character.digit(charVal[i], 10);
            if (digit == -1) {
                if (charVal[i] == ' ') {
                    // It parsed all the digits
                    i++;
                    break;
                }
                // It isn't a valid digit, and isn't a white space
                // math.0E=bad string format
                throw new IllegalArgumentException(Messages.getString("math.0E")); //$NON-NLS-1$
            }
            // Accumulating the value parsed
            this.precision = this.precision * 10 + digit;
            if (this.precision < 0) {
                // Accumulated value wrapped around: int overflow
                // math.0E=bad string format
                throw new IllegalArgumentException(Messages.getString("math.0E")); //$NON-NLS-1$
            }
            i++;
        } while (true);
        // Parsing the literal "roundingMode=" marker
        for (j = 0; (j < chRoundingMode.length)
                && (charVal[i] == chRoundingMode[j]); i++, j++) {
            ;
        }
        if (j < chRoundingMode.length) {
            // math.0E=bad string format
            throw new IllegalArgumentException(Messages.getString("math.0E")); //$NON-NLS-1$
        }
        // Parsing the value for "roundingMode"...
        // RoundingMode.valueOf rejects unknown names with IllegalArgumentException.
        this.roundingMode = RoundingMode.valueOf(String.valueOf(charVal, i,
                charVal.length - i));
    }

    /* Public Methods */

    /** Returns the precision: the number of significant digits, 0 = unlimited. */
    public int getPrecision() {
        return precision;
    }

    /** Returns the rounding mode used when results exceed the precision. */
    public RoundingMode getRoundingMode() {
        return roundingMode;
    }

    /**
     * Two contexts are equal iff both their precision and their rounding
     * mode are equal.
     */
    @Override
    public boolean equals(Object x) {
        return ((x instanceof MathContext)
                && (((MathContext) x).getPrecision() == precision) && (((MathContext) x)
                .getRoundingMode() == roundingMode));
    }

    /** Hash combining precision and rounding mode, consistent with equals. */
    @Override
    public int hashCode() {
        // Make place for the necessary bits to represent 8 rounding modes
        return ((precision << 3) | roundingMode.ordinal());
    }

    /**
     * Returns {@code "precision=<digits> roundingMode=<mode name>"} — the
     * exact format accepted by {@link #MathContext(String)}.
     */
    @Override
    public String toString() {
        StringBuffer sb = new StringBuffer(45);
        sb.append(chPrecision);
        sb.append(precision);
        sb.append(' ');
        sb.append(chRoundingMode);
        sb.append(roundingMode);
        return sb.toString();
    }

    /**
     * Validates deserialized state: a negative precision or a {@code null}
     * rounding mode can only come from a corrupted stream.
     *
     * @throws StreamCorruptedException if the deserialized fields are invalid
     */
    private void readObject(ObjectInputStream s) throws IOException,
            ClassNotFoundException {
        s.defaultReadObject();
        if (precision < 0) {
            // math.0F=bad precision value
            throw new StreamCorruptedException(Messages.getString("math.0F")); //$NON-NLS-1$
        }
        if (roundingMode == null) {
            // math.10=null roundingMode
            throw new StreamCorruptedException(Messages.getString("math.10")); //$NON-NLS-1$
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.document;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static com.google.common.collect.Iterables.filter;
import static com.google.common.collect.Iterables.size;
import static java.util.concurrent.TimeUnit.HOURS;
import static java.util.concurrent.TimeUnit.MINUTES;
import static org.apache.jackrabbit.oak.commons.FixturesHelper.Fixture.DOCUMENT_MEM;
import static org.apache.jackrabbit.oak.commons.FixturesHelper.Fixture.DOCUMENT_NS;
import static org.apache.jackrabbit.oak.commons.FixturesHelper.Fixture.DOCUMENT_RDB;
import static org.apache.jackrabbit.oak.commons.FixturesHelper.getFixtures;
import static org.apache.jackrabbit.oak.plugins.document.Collection.NODES;
import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.NUM_REVS_THRESHOLD;
import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.PREV_SPLIT_FACTOR;
import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.SplitDocType;
import static org.apache.jackrabbit.oak.plugins.document.TestUtils.NO_BINARY;
import static org.apache.jackrabbit.oak.plugins.document.VersionGarbageCollector.VersionGCStats;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.google.common.collect.Queues;
import com.google.common.collect.Sets;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.document.util.Utils;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.stats.Clock;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class VersionGarbageCollectorIT {
// The store backend (Mongo, RDB or in-memory) this parameterized run uses.
private DocumentStoreFixture fixture;

// Virtual clock injected into the store so tests can fast-forward time.
private Clock clock;

private DocumentNodeStore store;

// The garbage collector under test; obtained from the store in setUp().
private VersionGarbageCollector gc;

// Executor for tests that run GC concurrently with other operations.
private ExecutorService execService;

/**
 * Invoked by the Parameterized runner once per entry of {@link #fixtures()}.
 */
public VersionGarbageCollectorIT(DocumentStoreFixture fixture) {
    this.fixture = fixture;
}
/**
 * Supplies the document store fixtures this IT runs against.  The Mongo and
 * RDB fixtures are included only when configured and reachable; the
 * in-memory fixture is added when configured, or as a fallback when no
 * other fixture is available.
 */
@Parameterized.Parameters(name="{0}")
public static Collection<Object[]> fixtures() throws IOException {
    List<Object[]> available = Lists.newArrayList();
    DocumentStoreFixture mongoFixture = new DocumentStoreFixture.MongoFixture();
    if (getFixtures().contains(DOCUMENT_NS) && mongoFixture.isAvailable()) {
        available.add(new Object[] { mongoFixture });
    }
    DocumentStoreFixture rdbFixture = new DocumentStoreFixture.RDBFixture();
    if (getFixtures().contains(DOCUMENT_RDB) && rdbFixture.isAvailable()) {
        available.add(new Object[] { rdbFixture });
    }
    boolean memoryConfigured = getFixtures().contains(DOCUMENT_MEM);
    if (memoryConfigured || available.isEmpty()) {
        available.add(new Object[] { new DocumentStoreFixture.MemoryFixture() });
    }
    return available;
}
/**
 * Builds a fresh DocumentNodeStore on the current fixture, driven by a
 * virtual clock so individual tests can jump forward in time.
 */
@Before
public void setUp() throws InterruptedException {
    execService = Executors.newCachedThreadPool();
    clock = new Clock.Virtual();
    // Start the virtual clock at the current wall-clock time.
    clock.waitUntil(System.currentTimeMillis());
    Revision.setClock(clock);
    store = new DocumentMK.Builder()
            .clock(clock)
            // Lease checks would interfere with the large clock jumps below.
            .setLeaseCheck(false)
            .setDocumentStore(fixture.createDocumentStore())
            // Run background operations synchronously (no async delay).
            .setAsyncDelay(0)
            .getNodeStore();
    gc = store.getVersionGarbageCollector();
}
/**
 * Disposes the store and fixture, restores the default Revision clock and
 * stops the executor (waiting briefly for running tasks to finish).
 */
@After
public void tearDown() throws Exception {
    store.dispose();
    Revision.resetClockToDefault();
    execService.shutdown();
    execService.awaitTermination(1, MINUTES);
    fixture.dispose();
}
/**
 * Verifies that revision GC is skipped while an unexpired checkpoint
 * exists, and runs again once the checkpoint has expired.
 */
@Test
public void gcIgnoredForCheckpoint() throws Exception {
    long expiryTime = 100, maxAge = 20;
    Revision cp = Revision.fromString(store.checkpoint(expiryTime));
    // Fast forward time into the future, but before the checkpoint expires
    clock.waitUntil(cp.getTimestamp() + expiryTime - maxAge);
    VersionGCStats stats = gc.gc(maxAge, TimeUnit.MILLISECONDS);
    assertTrue(stats.ignoredGCDueToCheckPoint);
    // Fast forward time far enough that the checkpoint expires
    clock.waitUntil(clock.getTime() + expiryTime + 1);
    stats = gc.gc(maxAge, TimeUnit.MILLISECONDS);
    assertFalse("GC should be performed", stats.ignoredGCDueToCheckPoint);
}
/**
 * Verifies deleted-document GC: nothing is collected while nothing is
 * deleted or before maxAge has passed; a deleted document is collected
 * after maxAge; a revived document (deleted then recreated) is not.
 */
@Test
public void testGCDeletedDocument() throws Exception{
    // Create nodes /x/y and /z
    NodeBuilder b1 = store.getRoot().builder();
    b1.child("x").child("y");
    b1.child("z");
    store.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY);

    long maxAge = 1; //hours
    long delta = TimeUnit.MINUTES.toMillis(10);
    //1. Go past GC age and check no GC done as nothing deleted
    clock.waitUntil(Revision.getCurrentTimestamp() + maxAge);
    VersionGCStats stats = gc.gc(maxAge, HOURS);
    assertEquals(0, stats.deletedDocGCCount);

    //Remove x/y
    NodeBuilder b2 = store.getRoot().builder();
    b2.child("x").child("y").remove();
    store.merge(b2, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    store.runBackgroundOperations();

    //2. Check that a deleted doc is not collected before maxAge
    //Clock cannot move back (it moved forward in #1) so double the maxAge
    clock.waitUntil(clock.getTime() + delta);
    stats = gc.gc(maxAge*2, HOURS);
    assertEquals(0, stats.deletedDocGCCount);

    //3. Check that deleted doc does get collected post maxAge
    clock.waitUntil(clock.getTime() + HOURS.toMillis(maxAge*2) + delta);
    stats = gc.gc(maxAge*2, HOURS);
    assertEquals(1, stats.deletedDocGCCount);

    //4. Check that a revived doc (deleted and created again) does not get gc
    NodeBuilder b3 = store.getRoot().builder();
    b3.child("z").remove();
    store.merge(b3, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    NodeBuilder b4 = store.getRoot().builder();
    b4.child("z");
    store.merge(b4, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    clock.waitUntil(clock.getTime() + HOURS.toMillis(maxAge*2) + delta);
    stats = gc.gc(maxAge*2, HOURS);
    assertEquals(0, stats.deletedDocGCCount);
}
/** Split-document GC with a short sub-node name (short document IDs). */
@Test
public void gcSplitDocs() throws Exception {
    gcSplitDocsInternal("foo");
}
/**
 * Split-document GC with a very long sub-node name, exercising the
 * long-path (hashed ID) handling of previous documents.
 */
@Test
public void gcLongPathSplitDocs() throws Exception {
    gcSplitDocsInternal(Strings.repeat("sub", 120));
}
/**
 * Creates enough revisions on two test nodes to trigger both split document
 * types (COMMIT_ROOT_ONLY and DEFAULT_LEAF), then verifies that GC removes
 * both previous documents after maxAge.
 *
 * @param subNodeName name of the intermediate node; a very long name
 *        exercises the long-path document ID handling
 */
private void gcSplitDocsInternal(String subNodeName) throws Exception {
    long maxAge = 1; //hrs
    long delta = TimeUnit.MINUTES.toMillis(10);

    NodeBuilder b1 = store.getRoot().builder();
    b1.child("test").child(subNodeName).child("bar");
    b1.child("test2").child(subNodeName);
    store.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY);

    //Commit on a node which has a child and where the commit root
    // is parent
    for (int i = 0; i < NUM_REVS_THRESHOLD; i++) {
        b1 = store.getRoot().builder();
        //This updates a middle node i.e. one which has child bar
        //Should result in SplitDoc of type PROP_COMMIT_ONLY
        b1.child("test").child(subNodeName).setProperty("prop",i);

        //This should result in SplitDoc of type DEFAULT_NO_CHILD
        b1.child("test2").child(subNodeName).setProperty("prop", i);
        store.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    }
    store.runBackgroundOperations();

    // Both nodes must now have exactly one previous (split) document,
    // of the two different split types.
    List<NodeDocument> previousDocTestFoo =
            ImmutableList.copyOf(getDoc("/test/" + subNodeName).getAllPreviousDocs());
    List<NodeDocument> previousDocTestFoo2 =
            ImmutableList.copyOf(getDoc("/test2/" + subNodeName).getAllPreviousDocs());

    assertEquals(1, previousDocTestFoo.size());
    assertEquals(1, previousDocTestFoo2.size());

    assertEquals(SplitDocType.COMMIT_ROOT_ONLY, previousDocTestFoo.get(0).getSplitDocType());
    assertEquals(SplitDocType.DEFAULT_LEAF, previousDocTestFoo2.get(0).getSplitDocType());

    clock.waitUntil(clock.getTime() + HOURS.toMillis(maxAge) + delta);
    VersionGCStats stats = gc.gc(maxAge, HOURS);
    assertEquals(2, stats.splitDocGCCount);

    //Previous doc should be removed
    assertNull(getDoc(previousDocTestFoo.get(0).getPath()));
    assertNull(getDoc(previousDocTestFoo2.get(0).getPath()));

    //Following would not work for Mongo as the delete happened on the server side
    //And entries from cache are not evicted
    //assertTrue(ImmutableList.copyOf(getDoc("/test2/foo").getAllPreviousDocs()).isEmpty());
}
// OAK-1729
/**
 * Verifies that GC also removes intermediate previous documents (height > 0
 * in the previous-ranges tree), which are created once PREV_SPLIT_FACTOR
 * leaf previous documents exist, while the live node state stays intact.
 */
@Test
public void gcIntermediateDocs() throws Exception {
    long maxAge = 1; //hrs
    long delta = TimeUnit.MINUTES.toMillis(10);

    NodeBuilder b1 = store.getRoot().builder();
    // adding the test node will cause the commit root to be placed
    // on the root document, because the children flag is set on the
    // root document
    b1.child("test");
    store.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    assertTrue(getDoc("/test").getLocalRevisions().isEmpty());
    // setting the test property afterwards will use the new test document
    // as the commit root. this is what we want for the test.
    b1 = store.getRoot().builder();
    b1.child("test").setProperty("test", "value");
    store.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    assertTrue(!getDoc("/test").getLocalRevisions().isEmpty());

    // produce PREV_SPLIT_FACTOR batches of NUM_REVS_THRESHOLD revisions,
    // each batch causing one split on the following background operations
    for (int i = 0; i < PREV_SPLIT_FACTOR; i++) {
        for (int j = 0; j < NUM_REVS_THRESHOLD; j++) {
            b1 = store.getRoot().builder();
            b1.child("test").setProperty("prop", i * NUM_REVS_THRESHOLD + j);
            store.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        }
        store.runBackgroundOperations();
    }
    // trigger another split, now that we have 10 previous docs
    // this will create an intermediate previous doc
    store.addSplitCandidate(Utils.getIdFromPath("/test"));
    store.runBackgroundOperations();

    // sanity check: there must be at least one intermediate previous doc
    Map<Revision, Range> prevRanges = getDoc("/test").getPreviousRanges();
    boolean hasIntermediateDoc = false;
    for (Map.Entry<Revision, Range> entry : prevRanges.entrySet()) {
        if (entry.getValue().getHeight() > 0) {
            hasIntermediateDoc = true;
            break;
        }
    }
    assertTrue("Test data does not have intermediate previous docs",
            hasIntermediateDoc);

    clock.waitUntil(clock.getTime() + HOURS.toMillis(maxAge) + delta);
    VersionGCStats stats = gc.gc(maxAge, HOURS);
    assertEquals(10, stats.splitDocGCCount);

    // the node and its property must still be readable after GC
    DocumentNodeState test = getDoc("/test").getNodeAtRevision(
            store, store.getHeadRevision(), null);
    assertNotNull(test);
    assertTrue(test.hasProperty("test"));
}
// OAK-1779
/**
 * Verifies that collecting a deleted document does not leave stale entries
 * in the node caches: after GC the visible children match the expected set.
 */
@Test
public void cacheConsistency() throws Exception {
    long maxAge = 1; //hrs
    long delta = TimeUnit.MINUTES.toMillis(10);

    Set<String> names = Sets.newHashSet();
    NodeBuilder b1 = store.getRoot().builder();
    for (int i = 0; i < 10; i++) {
        String name = "test-" + i;
        b1.child(name);
        names.add(name);
    }
    store.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY);

    // iterate children to populate the node state cache
    for (ChildNodeEntry entry : store.getRoot().getChildNodeEntries()) {
        entry.getNodeState();
    }

    b1 = store.getRoot().builder();
    b1.getChildNode("test-7").remove();
    names.remove("test-7");
    store.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY);

    clock.waitUntil(clock.getTime() + HOURS.toMillis(maxAge) + delta);

    VersionGCStats stats = gc.gc(maxAge, HOURS);
    assertEquals(1, stats.deletedDocGCCount);

    // reading children after GC must reflect exactly the remaining nodes
    Set<String> children = Sets.newHashSet();
    for (ChildNodeEntry entry : store.getRoot().getChildNodeEntries()) {
        children.add(entry.getName());
    }
    assertEquals(names, children);
}
// OAK-1793
/**
 * Verifies that a node remains readable after GC removes a previous
 * document containing its most recent modification.
 */
@Test
public void gcPrevWithMostRecentModification() throws Exception {
    long maxAge = 1; //hrs
    long delta = TimeUnit.MINUTES.toMillis(10);

    // cross the split threshold on both /foo and /bar
    for (int i = 0; i < NUM_REVS_THRESHOLD + 1; i++) {
        NodeBuilder builder = store.getRoot().builder();
        builder.child("foo").setProperty("prop", "v" + i);
        builder.child("bar").setProperty("prop", "v" + i);
        store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    }
    store.runBackgroundOperations();

    clock.waitUntil(clock.getTime() + HOURS.toMillis(maxAge) + delta);

    VersionGCStats stats = gc.gc(maxAge, HOURS);
    // one split doc per node expected to be collected
    assertEquals(2, stats.splitDocGCCount);

    NodeDocument doc = getDoc("/foo");
    assertNotNull(doc);
    DocumentNodeState state = doc.getNodeAtRevision(
            store, store.getHeadRevision(), null);
    assertNotNull(state);
}
// OAK-1791
/**
 * Simulates continuous writes (one commit per simulated second for several
 * hours) with GC running twice an hour, and asserts that neither the number
 * of previous documents nor the number of retained revisions grows without
 * bound.
 */
@Test
public void gcDefaultLeafSplitDocs() throws Exception {
    Revision.setClock(clock);

    NodeBuilder builder = store.getRoot().builder();
    builder.child("test").setProperty("prop", -1);
    merge(store, builder);

    String id = Utils.getIdFromPath("/test");
    long start = Revision.getCurrentTimestamp();
    // simulate continuous writes once a second for one day
    // collect garbage older than one hour
    int hours = 24;
    if (fixture instanceof DocumentStoreFixture.MongoFixture) {
        // only run for 6 hours on MongoDB to
        // keep time to run on a reasonable level
        hours = 6;
    }
    for (int i = 0; i < 3600 * hours; i++) {
        clock.waitUntil(start + i * 1000);
        builder = store.getRoot().builder();
        builder.child("test").setProperty("prop", i);
        merge(store, builder);
        if (i % 10 == 0) {
            store.runBackgroundOperations();
        }
        // trigger GC twice an hour
        if (i % 1800 == 0) {
            gc.gc(1, HOURS);
            NodeDocument doc = store.getDocumentStore().find(NODES, id);
            assertNotNull(doc);
            int numPrevDocs = Iterators.size(doc.getAllPreviousDocs());
            assertTrue("too many previous docs: " + numPrevDocs,
                    numPrevDocs < 70);
        }
    }
    NodeDocument doc = store.getDocumentStore().find(NODES, id);
    assertNotNull(doc);
    int numRevs = size(doc.getValueMap("prop").entrySet());
    assertTrue("too many revisions: " + numRevs, numRevs < 6000);
}
// OAK-2778
/**
 * Runs GC concurrently with a modification that revives one of the two
 * candidate documents mid-collection.  The VersionGCSupport is instrumented
 * to hand each candidate to this thread via a synchronous queue, so the
 * test can recreate one node after GC has seen it but before it finishes.
 * Only the untouched document must be collected.
 */
@Test
public void gcWithConcurrentModification() throws Exception {
    Revision.setClock(clock);
    DocumentStore ds = store.getDocumentStore();

    // create test content
    createTestNode("foo");
    createTestNode("bar");

    // remove again
    NodeBuilder builder = store.getRoot().builder();
    builder.getChildNode("foo").remove();
    builder.getChildNode("bar").remove();
    merge(store, builder);

    // wait one hour
    clock.waitUntil(clock.getTime() + HOURS.toMillis(1));

    // rendezvous point: GC blocks handing over each candidate document
    final BlockingQueue<NodeDocument> docs = Queues.newSynchronousQueue();
    VersionGCSupport gcSupport = new VersionGCSupport(store.getDocumentStore()) {
        @Override
        public Iterable<NodeDocument> getPossiblyDeletedDocs(long lastModifiedTime) {
            return filter(super.getPossiblyDeletedDocs(lastModifiedTime),
                    new Predicate<NodeDocument>() {
                        @Override
                        public boolean apply(NodeDocument input) {
                            try {
                                docs.put(input);
                            } catch (InterruptedException e) {
                                throw new RuntimeException(e);
                            }
                            return true;
                        }
                    });
        }
    };
    final VersionGarbageCollector gc = new VersionGarbageCollector(store, gcSupport);
    // start GC -> will try to remove /foo and /bar
    Future<VersionGCStats> f = execService.submit(new Callable<VersionGCStats>() {
        @Override
        public VersionGCStats call() throws Exception {
            return gc.gc(30, MINUTES);
        }
    });

    NodeDocument doc = docs.take();
    String name = PathUtils.getName(doc.getPath());
    // recreate the node GC hasn't handed over yet
    name = name.equals("foo") ? "bar" : "foo";
    builder = store.getRoot().builder();
    builder.child(name);
    merge(store, builder);

    // loop over child node entries -> will populate nodeChildrenCache
    for (ChildNodeEntry cne : store.getRoot().getChildNodeEntries()) {
        cne.getName();
    }
    // invalidate cached DocumentNodeState
    DocumentNodeState state = (DocumentNodeState) store.getRoot().getChildNode(name);
    store.invalidateNodeCache(state.getPath(), store.getRoot().getLastRevision());

    // drain remaining candidates until GC completes
    while (!f.isDone()) {
        docs.poll();
    }

    // read children again after GC finished
    List<String> names = Lists.newArrayList();
    for (ChildNodeEntry cne : store.getRoot().getChildNodeEntries()) {
        names.add(cne.getName());
    }
    assertEquals(1, names.size());

    doc = ds.find(NODES, Utils.getIdFromPath("/" + names.get(0)));
    assertNotNull(doc);
    assertEquals(0, Iterators.size(doc.getAllPreviousDocs()));

    VersionGCStats stats = f.get();
    assertEquals(1, stats.deletedDocGCCount);
    assertEquals(2, stats.splitDocGCCount);
}
// OAK-4819
/**
 * Verifies that GC survives a document whose ID does not follow the
 * depth-prefixed path convention: the malformed document is skipped without
 * failing, and the regular deleted document is still collected.
 */
@Test
public void malformedId() throws Exception {
    long maxAge = 1; //hrs
    long delta = TimeUnit.MINUTES.toMillis(10);

    NodeBuilder builder = store.getRoot().builder();
    builder.child("foo");
    store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    // remove again
    builder = store.getRoot().builder();
    builder.child("foo").remove();
    store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    store.runBackgroundOperations();

    // add a document with a malformed id (no depth prefix / path)
    String id = "42";
    UpdateOp op = new UpdateOp(id, true);
    NodeDocument.setDeletedOnce(op);
    NodeDocument.setModified(op, store.newRevision());
    store.getDocumentStore().create(NODES, Lists.newArrayList(op));

    clock.waitUntil(clock.getTime() + HOURS.toMillis(maxAge) + delta);

    // gc must not fail
    VersionGCStats stats = gc.gc(maxAge, HOURS);
    assertEquals(1, stats.deletedDocGCCount);
}
    /**
     * Verifies that after revision GC (run from a second cluster node) removes
     * split/previous documents, the first node — whose cache still holds the
     * now-stale previous-document ids — re-reads the main document and observes
     * the post-GC state (its modCount must have changed).
     */
    @Test
    public void invalidateCacheOnMissingPreviousDocument() throws Exception {
        // only meaningful when both cluster nodes share one backend store
        assumeTrue(fixture.hasSinglePersistence());
        DocumentStore ds = store.getDocumentStore();
        NodeBuilder builder = store.getRoot().builder();
        builder.child("foo");
        store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        // commit 60 property changes, explicitly splitting /foo's document as
        // we go so it accumulates previous (split) documents
        for (int i = 0; i < 60; i++) {
            builder = store.getRoot().builder();
            builder.child("foo").setProperty("p", i);
            merge(store, builder);
            RevisionVector head = store.getHeadRevision();
            for (UpdateOp op : SplitOperations.forDocument(
                    ds.find(NODES, Utils.getIdFromPath("/foo")), store, head,
                    NO_BINARY, 2)) {
                ds.createOrUpdate(NODES, op);
            }
            // one minute between commits so later GC (30 min) removes only some
            clock.waitUntil(clock.getTime() + TimeUnit.MINUTES.toMillis(1));
        }
        store.runBackgroundOperations();
        NodeDocument foo = ds.find(NODES, Utils.getIdFromPath("/foo"));
        assertNotNull(foo);
        // remember the pre-GC modCount; it must change once GC's effects are seen
        Long modCount = foo.getModCount();
        assertNotNull(modCount);
        // collect the ids of all previous-document leaves before GC runs
        List<String> prevIds = Lists.newArrayList(Iterators.transform(
                foo.getPreviousDocLeaves(), new Function<NodeDocument, String>() {
                    @Override
                    public String apply(NodeDocument input) {
                        return input.getId();
                    }
                }));
        // run gc on another document node store
        DocumentStore ds2 = fixture.createDocumentStore(2);
        DocumentNodeStore ns2 = new DocumentMK.Builder().setClusterId(2)
                .clock(clock).setAsyncDelay(0).setDocumentStore(ds2).getNodeStore();
        try {
            VersionGarbageCollector gc = ns2.getVersionGarbageCollector();
            // collect about half of the changes
            gc.gc(30, TimeUnit.MINUTES);
        } finally {
            ns2.dispose();
        }
        // evict prev docs from cache and force DocumentStore
        // to check with storage again
        for (String id : prevIds) {
            ds.invalidateCache(NODES, id);
        }
        foo = ds.find(NODES, Utils.getIdFromPath("/foo"));
        assertNotNull(foo);
        // touching all previous docs makes the store notice the ones GC removed
        Iterators.size(foo.getAllPreviousDocs());
        // foo must now reflect state after GC
        foo = ds.find(NODES, Utils.getIdFromPath("/foo"));
        assertNotEquals(modCount, foo.getModCount());
    }
private void createTestNode(String name) throws CommitFailedException {
DocumentStore ds = store.getDocumentStore();
NodeBuilder builder = store.getRoot().builder();
builder.child(name);
merge(store, builder);
String id = Utils.getIdFromPath("/" + name);
int i = 0;
while (ds.find(NODES, id).getPreviousRanges().isEmpty()) {
builder = store.getRoot().builder();
builder.getChildNode(name).setProperty("p", i++);
merge(store, builder);
store.runBackgroundOperations();
}
}
    /**
     * Merges the pending changes in {@code builder} into {@code store} using an
     * empty commit hook and empty commit info.
     */
    private void merge(DocumentNodeStore store, NodeBuilder builder)
            throws CommitFailedException {
        store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    }
    /**
     * Reads the NodeDocument for the given absolute path from the document
     * store. The trailing {@code 0} is presumably a max-cache-age forcing a
     * fresh read from storage — TODO confirm against DocumentStore.find.
     */
    private NodeDocument getDoc(String path){
        return store.getDocumentStore().find(NODES, Utils.getIdFromPath(path), 0);
    }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package io.bazel.rulesscala.jar;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarOutputStream;
import java.util.zip.CRC32;
import java.util.zip.ZipException;
/**
* A simple helper class for creating Jar files. All Jar entries are sorted alphabetically. Allows
* normalization of Jar entries by setting the timestamp of non-.class files to the DOS epoch.
* Timestamps of .class files are set to the DOS epoch + 2 seconds (The zip timestamp granularity)
* Adjusting the timestamp for .class files is neccessary since otherwise javac will recompile java
* files if both the java file and its .class file are present.
*/
public class JarHelper {

  public static final String MANIFEST_DIR = "META-INF/";
  public static final String MANIFEST_NAME = JarFile.MANIFEST_NAME;
  public static final String SERVICES_DIR = "META-INF/services/";

  /** The DOS epoch (1980-01-01) expressed as milliseconds since the Java epoch. */
  public static final long DOS_EPOCH_IN_JAVA_TIME = 315561600000L;

  // ZIP timestamps have a resolution of 2 seconds.
  // see http://www.info-zip.org/FAQ.html#limits
  public static final long MINIMUM_TIMESTAMP_INCREMENT = 2000L;

  // The name of the Jar file we want to create
  protected final String jarFile;

  // The properties to describe how to create the Jar
  protected boolean normalize;
  protected int storageMethod = JarEntry.DEFLATED;
  protected boolean verbose = false;

  // The state needed to create the Jar
  protected final Set<String> names = new HashSet<>();
  protected JarOutputStream out;

  public JarHelper(String filename) {
    jarFile = filename;
  }

  /** Returns true if {@code file} is a regular file whose name ends with ".jar". */
  public static boolean isJar(File file) {
    return file.getName().endsWith(".jar") && (file.isFile());
  }

  /**
   * Enables or disables the Jar entry normalization.
   *
   * @param normalize If true the timestamps of Jar entries will be set to the
   *     DOS epoch.
   */
  public void setNormalize(boolean normalize) {
    this.normalize = normalize;
  }

  /**
   * Enables or disables compression for the Jar file entries.
   *
   * @param compression if true enables compression for the Jar file entries.
   */
  public void setCompression(boolean compression) {
    storageMethod = compression ? JarEntry.DEFLATED : JarEntry.STORED;
  }

  /**
   * Enables or disables verbose messages.
   *
   * @param verbose if true enables verbose messages.
   */
  public void setVerbose(boolean verbose) {
    this.verbose = verbose;
  }

  /**
   * Returns the normalized timestamp for a jar entry based on its name.
   * This is necessary since javac will, when loading a class X, prefer a
   * source file to a class file, if both files have the same timestamp.
   * Therefore, we need to adjust the timestamp for class files to slightly
   * after the normalized time.
   *
   * @param name The name of the file for which we should return the
   *     normalized timestamp.
   * @return the time for a new Jar file entry in milliseconds since the epoch.
   */
  private long normalizedTimestamp(String name) {
    if (name.endsWith(".class")) {
      return DOS_EPOCH_IN_JAVA_TIME + MINIMUM_TIMESTAMP_INCREMENT;
    } else {
      return DOS_EPOCH_IN_JAVA_TIME;
    }
  }

  /**
   * Returns the time for a new Jar file entry in milliseconds since the epoch.
   * Uses {@link JarHelper#DOS_EPOCH_IN_JAVA_TIME} for normalized entries,
   * {@link System#currentTimeMillis()} otherwise.
   *
   * @param filename The name of the file for which we are entering the time
   * @return the time for a new Jar file entry in milliseconds since the epoch.
   */
  protected long newEntryTimeMillis(String filename) {
    return normalize ? normalizedTimestamp(filename) : System.currentTimeMillis();
  }

  /**
   * Writes an entry with specific contents to the jar. Directory entries must
   * include the trailing '/'. Duplicate names are silently skipped.
   */
  protected void writeEntry(JarOutputStream out, String name, byte[] content) throws IOException {
    if (names.add(name)) {
      // Create a new entry
      JarEntry entry = new JarEntry(name);
      entry.setTime(newEntryTimeMillis(name));
      int size = content.length;
      entry.setSize(size);
      if (size == 0) {
        // Empty entries must be STORED with a zero CRC; DEFLATED would still
        // emit a (non-empty) compressed block.
        entry.setMethod(JarEntry.STORED);
        entry.setCrc(0);
        out.putNextEntry(entry);
      } else {
        entry.setMethod(storageMethod);
        if (storageMethod == JarEntry.STORED) {
          // STORED entries require the CRC to be set before putNextEntry.
          CRC32 crc = new CRC32();
          crc.update(content);
          entry.setCrc(crc.getValue());
        }
        out.putNextEntry(entry);
        out.write(content);
      }
      out.closeEntry();
    }
  }

  /**
   * Writes a standard Java manifest entry into the JarOutputStream. This
   * includes the directory entry for the "META-INF" directory
   *
   * @param content the Manifest content to write to the manifest entry.
   * @throws IOException
   */
  protected void writeManifestEntry(byte[] content) throws IOException {
    int oldStorageMethod = storageMethod;
    // Do not compress small manifest files, the compressed one is frequently
    // larger than the original. The threshold of 256 bytes is somewhat arbitrary.
    if (content.length < 256) {
      storageMethod = JarEntry.STORED;
    }
    try {
      writeEntry(out, MANIFEST_DIR, new byte[]{});
      writeEntry(out, MANIFEST_NAME, content);
    } finally {
      storageMethod = oldStorageMethod;
    }
  }

  /**
   * This copies the contents of jarFile into out, skipping entries whose
   * names are already present in {@code names}.
   * This is a static method to make it clear what is mutated (and it
   * was written by someone who really likes to minimize state changes).
   */
  static private void copyJar(JarFile nameJf, Set<String> names, JarOutputStream out) throws IOException {
    byte[] buffer = new byte[2048];
    for (Enumeration<JarEntry> e = nameJf.entries(); e.hasMoreElements();) {
      JarEntry existing = e.nextElement();
      String name = existing.getName();
      if (!names.contains(name)) {
        JarEntry outEntry = new JarEntry(name);
        outEntry.setTime(existing.getTime());
        outEntry.setSize(existing.getSize());
        out.putNextEntry(outEntry);
        // Read until EOF. The previous implementation looped while
        // in.available() > 0, but available() may legitimately return 0
        // before end-of-stream and would then silently truncate the entry.
        // try-with-resources also guarantees the entry stream is closed.
        try (InputStream in = nameJf.getInputStream(existing)) {
          int read;
          while ((read = in.read(buffer)) != -1) {
            out.write(buffer, 0, read);
          }
        }
        out.closeEntry();
        names.add(name);
      }
    }
  }

  /**
   * Copies file or directory entries from the file system into the jar.
   * Directory entries will be detected and their names automatically '/'
   * suffixed.
   */
  protected void copyEntry(String name, File file) throws IOException {
    if (!names.contains(name)) {
      if (!file.exists()) {
        throw new FileNotFoundException(file.getAbsolutePath() + " (No such file or directory)");
      }
      boolean isDirectory = file.isDirectory();
      if (isDirectory && !name.endsWith("/")) {
        name = name + '/'; // always normalize directory names before checking set
      }
      if (names.add(name)) {
        if (verbose) {
          System.err.println("adding " + file);
        }
        // Create a new entry
        if (JarHelper.isJar(file)) {
          // Merge the nested jar's entries; close the JarFile afterwards to
          // avoid leaking the underlying file handle (it was never closed
          // before).
          try (JarFile nameJf = new JarFile(file)) {
            copyJar(nameJf, names, out);
          }
        } else {
          long size = isDirectory ? 0 : file.length();
          JarEntry outEntry = new JarEntry(name);
          long newtime = normalize ? normalizedTimestamp(name) : file.lastModified();
          outEntry.setTime(newtime);
          outEntry.setSize(size);
          if (size == 0L) {
            outEntry.setMethod(JarEntry.STORED);
            outEntry.setCrc(0);
            out.putNextEntry(outEntry);
          } else {
            outEntry.setMethod(storageMethod);
            if (storageMethod == JarEntry.STORED) {
              // STORED entries require the CRC up front, which costs an extra
              // full read of the file.
              outEntry.setCrc(hashFile(file));
            }
            out.putNextEntry(outEntry);
            Files.copy(file.toPath(), out);
          }
          out.closeEntry();
        }
      }
    }
  }

  /**
   * Computes the CRC-32 checksum of the given file's contents.
   *
   * @param f the file to checksum
   * @return the CRC-32 value of the file contents
   * @throws IOException if the file cannot be read
   */
  protected long hashFile(File f) throws IOException {
    CRC32 crc = new CRC32();
    byte[] buffer = new byte[65536];
    // try-with-resources: the stream was previously never closed (fd leak).
    try (FileInputStream fis = new FileInputStream(f)) {
      int bytesRead;
      while ((bytesRead = fis.read(buffer)) != -1) {
        crc.update(buffer, 0, bytesRead);
      }
    }
    return crc.getValue();
  }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.connect.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * Summary information about a custom vocabulary.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/connect-2017-08-08/VocabularySummary" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class VocabularySummary implements Serializable, Cloneable, StructuredPojo {

    /** A unique name of the custom vocabulary. */
    private String name;

    /** The identifier of the custom vocabulary. */
    private String id;

    /** The Amazon Resource Name (ARN) of the custom vocabulary. */
    private String arn;

    /**
     * The language code of the vocabulary entries. For languages and their codes, see
     * <a href="https://docs.aws.amazon.com/transcribe/latest/dg/transcribe-whatis.html">What is Amazon Transcribe?</a>
     */
    private String languageCode;

    /** The current state of the custom vocabulary. */
    private String state;

    /** The timestamp when the custom vocabulary was last modified. */
    private java.util.Date lastModifiedTime;

    /** The reason why the custom vocabulary was not created. */
    private String failureReason;

    /** @param name a unique name of the custom vocabulary */
    public void setName(String name) {
        this.name = name;
    }

    /** @return the unique name of the custom vocabulary */
    public String getName() {
        return name;
    }

    /** Fluent variant of {@link #setName(String)}; returns {@code this} for chaining. */
    public VocabularySummary withName(String name) {
        setName(name);
        return this;
    }

    /** @param id the identifier of the custom vocabulary */
    public void setId(String id) {
        this.id = id;
    }

    /** @return the identifier of the custom vocabulary */
    public String getId() {
        return id;
    }

    /** Fluent variant of {@link #setId(String)}; returns {@code this} for chaining. */
    public VocabularySummary withId(String id) {
        setId(id);
        return this;
    }

    /** @param arn the Amazon Resource Name (ARN) of the custom vocabulary */
    public void setArn(String arn) {
        this.arn = arn;
    }

    /** @return the Amazon Resource Name (ARN) of the custom vocabulary */
    public String getArn() {
        return arn;
    }

    /** Fluent variant of {@link #setArn(String)}; returns {@code this} for chaining. */
    public VocabularySummary withArn(String arn) {
        setArn(arn);
        return this;
    }

    /**
     * @param languageCode the language code of the vocabulary entries
     * @see VocabularyLanguageCode
     */
    public void setLanguageCode(String languageCode) {
        this.languageCode = languageCode;
    }

    /**
     * @return the language code of the vocabulary entries
     * @see VocabularyLanguageCode
     */
    public String getLanguageCode() {
        return languageCode;
    }

    /**
     * Fluent variant of {@link #setLanguageCode(String)}; returns {@code this} for chaining.
     *
     * @see VocabularyLanguageCode
     */
    public VocabularySummary withLanguageCode(String languageCode) {
        setLanguageCode(languageCode);
        return this;
    }

    /**
     * Enum-typed fluent setter; stores {@code languageCode.toString()} and returns {@code this} for chaining.
     *
     * @see VocabularyLanguageCode
     */
    public VocabularySummary withLanguageCode(VocabularyLanguageCode languageCode) {
        this.languageCode = languageCode.toString();
        return this;
    }

    /**
     * @param state the current state of the custom vocabulary
     * @see VocabularyState
     */
    public void setState(String state) {
        this.state = state;
    }

    /**
     * @return the current state of the custom vocabulary
     * @see VocabularyState
     */
    public String getState() {
        return state;
    }

    /**
     * Fluent variant of {@link #setState(String)}; returns {@code this} for chaining.
     *
     * @see VocabularyState
     */
    public VocabularySummary withState(String state) {
        setState(state);
        return this;
    }

    /**
     * Enum-typed fluent setter; stores {@code state.toString()} and returns {@code this} for chaining.
     *
     * @see VocabularyState
     */
    public VocabularySummary withState(VocabularyState state) {
        this.state = state.toString();
        return this;
    }

    /** @param lastModifiedTime the timestamp when the custom vocabulary was last modified */
    public void setLastModifiedTime(java.util.Date lastModifiedTime) {
        this.lastModifiedTime = lastModifiedTime;
    }

    /** @return the timestamp when the custom vocabulary was last modified */
    public java.util.Date getLastModifiedTime() {
        return lastModifiedTime;
    }

    /** Fluent variant of {@link #setLastModifiedTime(java.util.Date)}; returns {@code this} for chaining. */
    public VocabularySummary withLastModifiedTime(java.util.Date lastModifiedTime) {
        setLastModifiedTime(lastModifiedTime);
        return this;
    }

    /** @param failureReason the reason why the custom vocabulary was not created */
    public void setFailureReason(String failureReason) {
        this.failureReason = failureReason;
    }

    /** @return the reason why the custom vocabulary was not created */
    public String getFailureReason() {
        return failureReason;
    }

    /** Fluent variant of {@link #setFailureReason(String)}; returns {@code this} for chaining. */
    public VocabularySummary withFailureReason(String failureReason) {
        setFailureReason(failureReason);
        return this;
    }

    /**
     * Renders the non-null fields of this object. Useful for testing and debugging.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getName() != null)
            sb.append("Name: ").append(getName()).append(",");
        if (getId() != null)
            sb.append("Id: ").append(getId()).append(",");
        if (getArn() != null)
            sb.append("Arn: ").append(getArn()).append(",");
        if (getLanguageCode() != null)
            sb.append("LanguageCode: ").append(getLanguageCode()).append(",");
        if (getState() != null)
            sb.append("State: ").append(getState()).append(",");
        if (getLastModifiedTime() != null)
            sb.append("LastModifiedTime: ").append(getLastModifiedTime()).append(",");
        if (getFailureReason() != null)
            sb.append("FailureReason: ").append(getFailureReason());
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, covering the generated null check too
        if (!(obj instanceof VocabularySummary))
            return false;
        VocabularySummary that = (VocabularySummary) obj;
        return java.util.Objects.equals(getName(), that.getName())
                && java.util.Objects.equals(getId(), that.getId())
                && java.util.Objects.equals(getArn(), that.getArn())
                && java.util.Objects.equals(getLanguageCode(), that.getLanguageCode())
                && java.util.Objects.equals(getState(), that.getState())
                && java.util.Objects.equals(getLastModifiedTime(), that.getLastModifiedTime())
                && java.util.Objects.equals(getFailureReason(), that.getFailureReason());
    }

    @Override
    public int hashCode() {
        // Objects.hash performs the same 31-based accumulation (seed 1,
        // null -> 0) as the generated code, so hash values are unchanged.
        return java.util.Objects.hash(getName(), getId(), getArn(), getLanguageCode(), getState(),
                getLastModifiedTime(), getFailureReason());
    }

    @Override
    public VocabularySummary clone() {
        try {
            return (VocabularySummary) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.connect.model.transform.VocabularySummaryMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Copyright 2014, Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
*
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package io.grpc;
import static com.google.common.base.Charsets.US_ASCII;
import static com.google.common.base.Charsets.UTF_8;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.base.MoreObjects;
import com.google.common.base.Objects;
import io.grpc.Metadata.TrustedAsciiMarshaller;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.TreeMap;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
/**
* Defines the status of an operation by providing a standard {@link Code} in conjunction with an
* optional descriptive message. Instances of {@code Status} are created by starting with the
* template for the appropriate {@link Status.Code} and supplementing it with additional
* information: {@code Status.NOT_FOUND.withDescription("Could not find 'important_file.txt'");}
*
* <p>For clients, every remote call will return a status on completion. In the case of errors this
* status may be propagated to blocking stubs as a {@link RuntimeException} or to a listener as an
* explicit parameter.
*
* <p>Similarly servers can report a status by throwing {@link StatusRuntimeException}
* or by passing the status to a callback.
*
* <p>Utility functions are provided to convert a status to an exception and to extract them
* back out.
*/
@Immutable
public final class Status {
/**
* The set of canonical status codes. If new codes are added over time they must choose
* a numerical value that does not collide with any previously used value.
*/
  /**
   * The set of canonical status codes. If new codes are added over time they must choose
   * a numerical value that does not collide with any previously used value.
   */
  public enum Code {
    /**
     * The operation completed successfully.
     */
    OK(0),
    /**
     * The operation was cancelled (typically by the caller).
     */
    CANCELLED(1),
    /**
     * Unknown error. An example of where this error may be returned is
     * if a Status value received from another address space belongs to
     * an error-space that is not known in this address space. Also
     * errors raised by APIs that do not return enough error information
     * may be converted to this error.
     */
    UNKNOWN(2),
    /**
     * Client specified an invalid argument. Note that this differs
     * from FAILED_PRECONDITION. INVALID_ARGUMENT indicates arguments
     * that are problematic regardless of the state of the system
     * (e.g., a malformed file name).
     */
    INVALID_ARGUMENT(3),
    /**
     * Deadline expired before operation could complete. For operations
     * that change the state of the system, this error may be returned
     * even if the operation has completed successfully. For example, a
     * successful response from a server could have been delayed long
     * enough for the deadline to expire.
     */
    DEADLINE_EXCEEDED(4),
    /**
     * Some requested entity (e.g., file or directory) was not found.
     */
    NOT_FOUND(5),
    /**
     * Some entity that we attempted to create (e.g., file or directory) already exists.
     */
    ALREADY_EXISTS(6),
    /**
     * The caller does not have permission to execute the specified
     * operation. PERMISSION_DENIED must not be used for rejections
     * caused by exhausting some resource (use RESOURCE_EXHAUSTED
     * instead for those errors). PERMISSION_DENIED must not be
     * used if the caller cannot be identified (use UNAUTHENTICATED
     * instead for those errors).
     */
    PERMISSION_DENIED(7),
    /**
     * Some resource has been exhausted, perhaps a per-user quota, or
     * perhaps the entire file system is out of space.
     */
    RESOURCE_EXHAUSTED(8),
    /**
     * Operation was rejected because the system is not in a state
     * required for the operation's execution. For example, directory
     * to be deleted may be non-empty, an rmdir operation is applied to
     * a non-directory, etc.
     *
     * <p>A litmus test that may help a service implementor in deciding
     * between FAILED_PRECONDITION, ABORTED, and UNAVAILABLE:
     * (a) Use UNAVAILABLE if the client can retry just the failing call.
     * (b) Use ABORTED if the client should retry at a higher-level
     * (e.g., restarting a read-modify-write sequence).
     * (c) Use FAILED_PRECONDITION if the client should not retry until
     * the system state has been explicitly fixed. E.g., if an "rmdir"
     * fails because the directory is non-empty, FAILED_PRECONDITION
     * should be returned since the client should not retry unless
     * they have first fixed up the directory by deleting files from it.
     */
    FAILED_PRECONDITION(9),
    /**
     * The operation was aborted, typically due to a concurrency issue
     * like sequencer check failures, transaction aborts, etc.
     *
     * <p>See litmus test above for deciding between FAILED_PRECONDITION,
     * ABORTED, and UNAVAILABLE.
     */
    ABORTED(10),
    /**
     * Operation was attempted past the valid range. E.g., seeking or
     * reading past end of file.
     *
     * <p>Unlike INVALID_ARGUMENT, this error indicates a problem that may
     * be fixed if the system state changes. For example, a 32-bit file
     * system will generate INVALID_ARGUMENT if asked to read at an
     * offset that is not in the range [0,2^32-1], but it will generate
     * OUT_OF_RANGE if asked to read from an offset past the current
     * file size.
     *
     * <p>There is a fair bit of overlap between FAILED_PRECONDITION and OUT_OF_RANGE.
     * We recommend using OUT_OF_RANGE (the more specific error) when it applies
     * so that callers who are iterating through
     * a space can easily look for an OUT_OF_RANGE error to detect when they are done.
     */
    OUT_OF_RANGE(11),
    /**
     * Operation is not implemented or not supported/enabled in this service.
     */
    UNIMPLEMENTED(12),
    /**
     * Internal errors. Means some invariants expected by underlying
     * system has been broken. If you see one of these errors,
     * something is very broken.
     */
    INTERNAL(13),
    /**
     * The service is currently unavailable. This is a most likely a
     * transient condition and may be corrected by retrying with
     * a backoff.
     *
     * <p>See litmus test above for deciding between FAILED_PRECONDITION,
     * ABORTED, and UNAVAILABLE.
     */
    UNAVAILABLE(14),
    /**
     * Unrecoverable data loss or corruption.
     */
    DATA_LOSS(15),
    /**
     * The request does not have valid authentication credentials for the
     * operation.
     */
    UNAUTHENTICATED(16);

    // The numeric value of this code.
    private final int value;

    // ASCII bytes of the numeric value, computed once per constant so that
    // encoding a code does not re-stringify the integer on every use.
    private final byte[] valueAscii;

    private Code(int value) {
      this.value = value;
      this.valueAscii = Integer.toString(value).getBytes(US_ASCII);
    }

    /**
     * The numerical value of the code.
     */
    public int value() {
      return value;
    }

    /**
     * Returns the canonical {@link Status} instance for this code, looked up
     * by numeric value in the pre-built STATUS_LIST.
     */
    public Status toStatus() {
      return STATUS_LIST.get(value);
    }

    // Pre-computed ASCII form of value(); used for serialization.
    private byte[] valueAscii() {
      return valueAscii;
    }
  }
// Create the canonical list of Status instances indexed by their code values.
private static final List<Status> STATUS_LIST = buildStatusList();
private static List<Status> buildStatusList() {
TreeMap<Integer, Status> canonicalizer = new TreeMap<Integer, Status>();
for (Code code : Code.values()) {
Status replaced = canonicalizer.put(code.value(), new Status(code));
if (replaced != null) {
throw new IllegalStateException("Code value duplication between "
+ replaced.getCode().name() + " & " + code.name());
}
}
return Collections.unmodifiableList(new ArrayList<Status>(canonicalizer.values()));
}
// A pseudo-enum of Status instances mapped 1:1 with values in Code. This simplifies construction
// patterns for derived instances of Status.
/** The operation completed successfully. */
public static final Status OK = Code.OK.toStatus();
/** The operation was cancelled (typically by the caller). */
public static final Status CANCELLED = Code.CANCELLED.toStatus();
/** Unknown error. See {@link Code#UNKNOWN}. */
public static final Status UNKNOWN = Code.UNKNOWN.toStatus();
/** Client specified an invalid argument. See {@link Code#INVALID_ARGUMENT}. */
public static final Status INVALID_ARGUMENT = Code.INVALID_ARGUMENT.toStatus();
/** Deadline expired before operation could complete. See {@link Code#DEADLINE_EXCEEDED}. */
public static final Status DEADLINE_EXCEEDED = Code.DEADLINE_EXCEEDED.toStatus();
/** Some requested entity (e.g., file or directory) was not found. */
public static final Status NOT_FOUND = Code.NOT_FOUND.toStatus();
/** Some entity that we attempted to create (e.g., file or directory) already exists. */
public static final Status ALREADY_EXISTS = Code.ALREADY_EXISTS.toStatus();
/**
* The caller does not have permission to execute the specified operation. See {@link
* Code#PERMISSION_DENIED}.
*/
public static final Status PERMISSION_DENIED = Code.PERMISSION_DENIED.toStatus();
/** The request does not have valid authentication credentials for the operation. */
public static final Status UNAUTHENTICATED = Code.UNAUTHENTICATED.toStatus();
/**
* Some resource has been exhausted, perhaps a per-user quota, or perhaps the entire file system
* is out of space.
*/
public static final Status RESOURCE_EXHAUSTED = Code.RESOURCE_EXHAUSTED.toStatus();
/**
* Operation was rejected because the system is not in a state required for the operation's
* execution. See {@link Code#FAILED_PRECONDITION}.
*/
public static final Status FAILED_PRECONDITION =
Code.FAILED_PRECONDITION.toStatus();
/**
* The operation was aborted, typically due to a concurrency issue like sequencer check failures,
* transaction aborts, etc. See {@link Code#ABORTED}.
*/
public static final Status ABORTED = Code.ABORTED.toStatus();
/** Operation was attempted past the valid range. See {@link Code#OUT_OF_RANGE}. */
public static final Status OUT_OF_RANGE = Code.OUT_OF_RANGE.toStatus();
/** Operation is not implemented or not supported/enabled in this service. */
public static final Status UNIMPLEMENTED = Code.UNIMPLEMENTED.toStatus();
/** Internal errors. See {@link Code#INTERNAL}. */
public static final Status INTERNAL = Code.INTERNAL.toStatus();
/** The service is currently unavailable. See {@link Code#UNAVAILABLE}. */
public static final Status UNAVAILABLE = Code.UNAVAILABLE.toStatus();
/** Unrecoverable data loss or corruption. */
public static final Status DATA_LOSS = Code.DATA_LOSS.toStatus();
/**
* Return a {@link Status} given a canonical error {@link Code} value.
*/
public static Status fromCodeValue(int codeValue) {
if (codeValue < 0 || codeValue > STATUS_LIST.size()) {
return UNKNOWN.withDescription("Unknown code " + codeValue);
} else {
return STATUS_LIST.get(codeValue);
}
}
private static Status fromCodeValue(byte[] asciiCodeValue) {
if (asciiCodeValue.length == 1 && asciiCodeValue[0] == '0') {
return Status.OK;
}
return fromCodeValueSlow(asciiCodeValue);
}
@SuppressWarnings("fallthrough")
private static Status fromCodeValueSlow(byte[] asciiCodeValue) {
int index = 0;
int codeValue = 0;
switch (asciiCodeValue.length) {
case 2:
if (asciiCodeValue[index] < '0' || asciiCodeValue[index] > '9') {
break;
}
codeValue += (asciiCodeValue[index++] - '0') * 10;
// fall through
case 1:
if (asciiCodeValue[index] < '0' || asciiCodeValue[index] > '9') {
break;
}
codeValue += asciiCodeValue[index] - '0';
if (codeValue < STATUS_LIST.size()) {
return STATUS_LIST.get(codeValue);
}
break;
default:
break;
}
return UNKNOWN.withDescription("Unknown code " + new String(asciiCodeValue, US_ASCII));
}
/**
* Return a {@link Status} given a canonical error {@link Code} object.
*/
public static Status fromCode(Code code) {
return code.toStatus();
}
/**
* Key to bind status code to trailing metadata.
*/
@Internal
public static final Metadata.Key<Status> CODE_KEY
= Metadata.Key.of("grpc-status", new StatusCodeMarshaller());
/**
* Marshals status messages for ({@link #MESSAGE_KEY}. gRPC does not use binary coding of
* status messages by default, which makes sending arbitrary strings difficult. This marshaller
* uses ASCII printable characters by default, and percent encodes (e.g. %0A) all non ASCII bytes.
* This leads to normal text being mostly readable (especially useful for debugging), and special
* text still being sent.
*
* <p>By default, the HTTP spec says that header values must be encoded using a strict subset of
* ASCII (See RFC 7230 section 3.2.6). HTTP/2 HPACK allows use of arbitrary binary headers, but
* we do not use them for interoperating with existing HTTP/1.1 code. Since the grpc-message
* is encoded to such a header, it needs to not use forbidden characters.
*
* <p>This marshaller works by converting the passed in string into UTF-8, checking to see if
* each individual byte is an allowable byte, and then either percent encoding or passing it
* through. When percent encoding, the byte is converted into hexadecimal notation with a '%'
* prepended.
*
* <p>When unmarshalling, bytes are passed through unless they match the "%XX" pattern. If they
* do match, the unmarshaller attempts to convert them back into their original UTF-8 byte
* sequence. After the input header bytes are converted into UTF-8 bytes, the new byte array is
   * reinterpreted back as a string.
*/
private static final TrustedAsciiMarshaller<String> STATUS_MESSAGE_MARSHALLER =
new StatusMessageMarshaller();
/**
* Key to bind status message to trailing metadata.
*/
@Internal
public static final Metadata.Key<String> MESSAGE_KEY =
Metadata.Key.of("grpc-message", STATUS_MESSAGE_MARSHALLER);
/**
* Extract an error {@link Status} from the causal chain of a {@link Throwable}.
* If no status can be found, a status is created with {@link Code#UNKNOWN} as its code and
* {@code t} as its cause.
*
* @return non-{@code null} status
*/
public static Status fromThrowable(Throwable t) {
Throwable cause = checkNotNull(t, "t");
while (cause != null) {
if (cause instanceof StatusException) {
return ((StatusException) cause).getStatus();
} else if (cause instanceof StatusRuntimeException) {
return ((StatusRuntimeException) cause).getStatus();
}
cause = cause.getCause();
}
// Couldn't find a cause with a Status
return UNKNOWN.withCause(t);
}
/**
* Extract an error trailers from the causal chain of a {@link Throwable}.
*
* @return the trailers or {@code null} if not found.
*/
@ExperimentalApi
public static Metadata trailersFromThrowable(Throwable t) {
Throwable cause = checkNotNull(t, "t");
while (cause != null) {
if (cause instanceof StatusException) {
return ((StatusException) cause).getTrailers();
} else if (cause instanceof StatusRuntimeException) {
return ((StatusRuntimeException) cause).getTrailers();
}
cause = cause.getCause();
}
return null;
}
static String formatThrowableMessage(Status status) {
if (status.description == null) {
return status.code.toString();
} else {
return status.code + ": " + status.description;
}
}
private final Code code;
private final String description;
private final Throwable cause;
private Status(Code code) {
this(code, null, null);
}
private Status(Code code, @Nullable String description, @Nullable Throwable cause) {
this.code = checkNotNull(code, "code");
this.description = description;
this.cause = cause;
}
/**
* Create a derived instance of {@link Status} with the given cause.
* However, the cause is not transmitted from server to client.
*/
public Status withCause(Throwable cause) {
if (Objects.equal(this.cause, cause)) {
return this;
}
return new Status(this.code, this.description, cause);
}
/**
* Create a derived instance of {@link Status} with the given description. Leading and trailing
* whitespace may be removed; this may change in the future.
*/
public Status withDescription(String description) {
if (Objects.equal(this.description, description)) {
return this;
}
return new Status(this.code, description, this.cause);
}
/**
* Create a derived instance of {@link Status} augmenting the current description with
* additional detail. Leading and trailing whitespace may be removed; this may change in the
* future.
*/
public Status augmentDescription(String additionalDetail) {
if (additionalDetail == null) {
return this;
} else if (this.description == null) {
return new Status(this.code, additionalDetail, this.cause);
} else {
return new Status(this.code, this.description + "\n" + additionalDetail, this.cause);
}
}
/**
* The canonical status code.
*/
public Code getCode() {
return code;
}
/**
* A description of this status for human consumption.
*/
@Nullable
public String getDescription() {
return description;
}
/**
* The underlying cause of an error.
* Note that the cause is not transmitted from server to client.
*/
@Nullable
public Throwable getCause() {
return cause;
}
/**
* Is this status OK, i.e., not an error.
*/
public boolean isOk() {
return Code.OK == code;
}
/**
* Convert this {@link Status} to a {@link RuntimeException}. Use {@link #fromThrowable}
* to recover this {@link Status} instance when the returned exception is in the causal chain.
*/
public StatusRuntimeException asRuntimeException() {
return new StatusRuntimeException(this);
}
/**
* Same as {@link #asRuntimeException()} but includes the provided trailers in the returned
* exception.
*/
@ExperimentalApi
public StatusRuntimeException asRuntimeException(Metadata trailers) {
return new StatusRuntimeException(this, trailers);
}
/**
* Convert this {@link Status} to an {@link Exception}. Use {@link #fromThrowable}
* to recover this {@link Status} instance when the returned exception is in the causal chain.
*/
public StatusException asException() {
return new StatusException(this);
}
/**
* Same as {@link #asException()} but includes the provided trailers in the returned exception.
*/
@ExperimentalApi
public StatusException asException(Metadata trailers) {
return new StatusException(this, trailers);
}
/** A string representation of the status useful for debugging. */
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("code", code.name())
.add("description", description)
.add("cause", cause)
.toString();
}
private static final class StatusCodeMarshaller implements TrustedAsciiMarshaller<Status> {
@Override
public byte[] toAsciiString(Status status) {
return status.getCode().valueAscii();
}
@Override
public Status parseAsciiString(byte[] serialized) {
return fromCodeValue(serialized);
}
}
private static final class StatusMessageMarshaller implements TrustedAsciiMarshaller<String> {
private static final byte[] HEX =
{'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'};
@Override
public byte[] toAsciiString(String value) {
byte[] valueBytes = value.getBytes(UTF_8);
for (int i = 0; i < valueBytes.length; i++) {
byte b = valueBytes[i];
// If there are only non escaping characters, skip the slow path.
if (isEscapingChar(b)) {
return toAsciiStringSlow(valueBytes, i);
}
}
return valueBytes;
}
private static boolean isEscapingChar(byte b) {
return b < ' ' || b >= '~' || b == '%';
}
/**
* @param valueBytes the UTF-8 bytes
* @param ri The reader index, pointed at the first byte that needs escaping.
*/
private static byte[] toAsciiStringSlow(byte[] valueBytes, int ri) {
byte[] escapedBytes = new byte[ri + (valueBytes.length - ri) * 3];
// copy over the good bytes
if (ri != 0) {
System.arraycopy(valueBytes, 0, escapedBytes, 0, ri);
}
int wi = ri;
for (; ri < valueBytes.length; ri++) {
byte b = valueBytes[ri];
// Manually implement URL encoding, per the gRPC spec.
if (isEscapingChar(b)) {
escapedBytes[wi] = '%';
escapedBytes[wi + 1] = HEX[(b >> 4) & 0xF];
escapedBytes[wi + 2] = HEX[b & 0xF];
wi += 3;
continue;
}
escapedBytes[wi++] = b;
}
byte[] dest = new byte[wi];
System.arraycopy(escapedBytes, 0, dest, 0, wi);
return dest;
}
@SuppressWarnings("deprecation") // Use fast but deprecated String ctor
@Override
public String parseAsciiString(byte[] value) {
for (int i = 0; i < value.length; i++) {
byte b = value[i];
if (b < ' ' || b >= '~' || (b == '%' && i + 2 < value.length)) {
return parseAsciiStringSlow(value);
}
}
return new String(value, 0);
}
private static String parseAsciiStringSlow(byte[] value) {
ByteBuffer buf = ByteBuffer.allocate(value.length);
for (int i = 0; i < value.length;) {
if (value[i] == '%' && i + 2 < value.length) {
try {
buf.put((byte)Integer.parseInt(new String(value, i + 1, 2, US_ASCII), 16));
i += 3;
continue;
} catch (NumberFormatException e) {
// ignore, fall through, just push the bytes.
}
}
buf.put(value[i]);
i += 1;
}
return new String(buf.array(), 0, buf.position(), UTF_8);
}
}
/**
* Equality on Statuses is not well defined. Instead, do comparison based on their Code with
* {@link #getCode}. The description and cause of the Status are unlikely to be stable, and
* additional fields may be added to Status in the future.
*/
@Override
public boolean equals(Object obj) {
return super.equals(obj);
}
/**
* Hash codes on Statuses are not well defined.
*
* @see #equals
*/
@Override
public int hashCode() {
return super.hashCode();
}
}
| |
/**
* Copyright Microsoft Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.microsoft.azure.storage;
import com.microsoft.azure.storage.blob.BlobInputStream;
import com.microsoft.azure.storage.blob.BlobOutputStream;
import com.microsoft.azure.storage.file.FileInputStream;
import com.microsoft.azure.storage.file.FileOutputStream;
/**
* RESERVED FOR INTERNAL USE. Contains storage constants.
*/
public final class Constants {
/**
* Defines constants for ServiceProperties requests.
*/
public static class AnalyticsConstants {
/**
* The XML element for the CORS Rule AllowedHeaders
*/
public static final String ALLOWED_HEADERS_ELEMENT = "AllowedHeaders";
/**
* The XML element for the CORS Rule AllowedMethods
*/
public static final String ALLOWED_METHODS_ELEMENT = "AllowedMethods";
/**
* The XML element for the CORS Rule AllowedOrigins
*/
public static final String ALLOWED_ORIGINS_ELEMENT = "AllowedOrigins";
/**
* The XML element for the CORS
*/
public static final String CORS_ELEMENT = "Cors";
/**
* The XML element for the CORS Rules
*/
public static final String CORS_RULE_ELEMENT = "CorsRule";
/**
* The XML element for the RetentionPolicy Days.
*/
public static final String DAYS_ELEMENT = "Days";
/**
* The XML element for the Default Service Version.
*/
public static final String DEFAULT_SERVICE_VERSION = "DefaultServiceVersion";
/**
* The XML element for the Logging Delete type.
*/
public static final String DELETE_ELEMENT = "Delete";
/**
* The XML element for the RetentionPolicy Enabled.
*/
public static final String ENABLED_ELEMENT = "Enabled";
/**
* The XML element for the CORS Rule ExposedHeaders
*/
public static final String EXPOSED_HEADERS_ELEMENT = "ExposedHeaders";
/**
* The XML element for the Hour Metrics
*/
public static final String HOUR_METRICS_ELEMENT = "HourMetrics";
/**
* The XML element for the Metrics IncludeAPIs.
*/
public static final String INCLUDE_APIS_ELEMENT = "IncludeAPIs";
/**
* Constant for the logs container.
*/
public static final String LOGS_CONTAINER = "$logs";
/**
* The XML element for the Logging
*/
public static final String LOGGING_ELEMENT = "Logging";
/**
* The XML element for the CORS Rule MaxAgeInSeconds
*/
public static final String MAX_AGE_IN_SECONDS_ELEMENT = "MaxAgeInSeconds";
/**
* Constant for the blob capacity metrics table.
*/
public static final String METRICS_CAPACITY_BLOB = "$MetricsCapacityBlob";
/**
* Constant for the blob service primary location hourly metrics table.
*/
public static final String METRICS_HOUR_PRIMARY_TRANSACTIONS_BLOB = "$MetricsHourPrimaryTransactionsBlob";
/**
* Constant for the file service primary location hourly metrics table.
*/
public static final String METRICS_HOUR_PRIMARY_TRANSACTIONS_FILE = "$MetricsHourPrimaryTransactionsFile";
/**
* Constant for the table service primary location hourly metrics table.
*/
public static final String METRICS_HOUR_PRIMARY_TRANSACTIONS_TABLE = "$MetricsHourPrimaryTransactionsTable";
/**
* Constant for the queue service primary location hourly metrics table.
*/
public static final String METRICS_HOUR_PRIMARY_TRANSACTIONS_QUEUE = "$MetricsHourPrimaryTransactionsQueue";
/**
* Constant for the blob service primary location minute metrics table.
*/
public static final String METRICS_MINUTE_PRIMARY_TRANSACTIONS_BLOB = "$MetricsMinutePrimaryTransactionsBlob";
/**
* Constant for the file service primary location minute metrics table.
*/
public static final String METRICS_MINUTE_PRIMARY_TRANSACTIONS_FILE = "$MetricsMinutePrimaryTransactionsFile";
/**
* Constant for the table service primary location minute metrics table.
*/
public static final String METRICS_MINUTE_PRIMARY_TRANSACTIONS_TABLE = "$MetricsMinutePrimaryTransactionsTable";
/**
* Constant for the queue service primary location minute metrics table.
*/
public static final String METRICS_MINUTE_PRIMARY_TRANSACTIONS_QUEUE = "$MetricsMinutePrimaryTransactionsQueue";
/**
* Constant for the blob service secondary location hourly metrics table.
*/
public static final String METRICS_HOUR_SECONDARY_TRANSACTIONS_BLOB = "$MetricsHourSecondaryTransactionsBlob";
/**
* Constant for the file service secondary location hourly metrics table.
*/
public static final String METRICS_HOUR_SECONDARY_TRANSACTIONS_FILE = "$MetricsHourSecondaryTransactionsFile";
/**
* Constant for the table service secondary location hourly metrics table.
*/
public static final String METRICS_HOUR_SECONDARY_TRANSACTIONS_TABLE = "$MetricsHourSecondaryTransactionsTable";
/**
* Constant for the queue service secondary location hourly metrics table.
*/
public static final String METRICS_HOUR_SECONDARY_TRANSACTIONS_QUEUE = "$MetricsHourSecondaryTransactionsQueue";
/**
* Constant for the blob service secondary location minute metrics table.
*/
public static final String METRICS_MINUTE_SECONDARY_TRANSACTIONS_BLOB = "$MetricsMinuteSecondaryTransactionsBlob";
/**
* Constant for the file service secondary location minute metrics table.
*/
public static final String METRICS_MINUTE_SECONDARY_TRANSACTIONS_FILE = "$MetricsMinuteSecondaryTransactionsFile";
/**
* Constant for the table service secondary location minute metrics table.
*/
public static final String METRICS_MINUTE_SECONDARY_TRANSACTIONS_TABLE = "$MetricsMinuteSecondaryTransactionsTable";
/**
* Constant for the queue service secondary location minute metrics table.
*/
public static final String METRICS_MINUTE_SECONDARY_TRANSACTIONS_QUEUE = "$MetricsMinuteSecondaryTransactionsQueue";
/**
* The XML element for the Minute Metrics
*/
public static final String MINUTE_METRICS_ELEMENT = "MinuteMetrics";
/**
* The XML element for the Logging Read type.
*/
public static final String READ_ELEMENT = "Read";
/**
* The XML element for the RetentionPolicy.
*/
public static final String RETENTION_POLICY_ELEMENT = "RetentionPolicy";
/**
* The XML element for the StorageServiceProperties
*/
public static final String STORAGE_SERVICE_PROPERTIES_ELEMENT = "StorageServiceProperties";
/**
* The XML element for the StorageServiceStats
*/
public static final String STORAGE_SERVICE_STATS = "StorageServiceStats";
/**
* The XML element for the Version
*/
public static final String VERSION_ELEMENT = "Version";
/**
* The XML element for the Logging Write type.
*/
public static final String WRITE_ELEMENT = "Write";
}
/**
* Defines constants for client encryption.
*/
public static class EncryptionConstants
{
/**
* Metadata header to store encryption materials.
*/
public static final String BLOB_ENCRYPTION_DATA = "encryptiondata";
/**
* Constant for the encryption protocol.
*/
public static final String ENCRYPTION_PROTOCOL_V1 = "1.0";
/**
* Encryption metadata key for key wrapping IV.
*/
public static final String KEY_WRAPPING_IV = "KeyWrappingIV";
/**
* Property name to store the encryption metadata.
*/
public static final String TABLE_ENCRYPTION_KEY_DETAILS = "_ClientEncryptionMetadata1";
/**
* Additional property name to store the encryption metadata.
*/
public static final String TABLE_ENCRYPTION_PROPERTY_DETAILS = "_ClientEncryptionMetadata2";
}
/**
* Defines constants for use with HTTP headers.
*/
public static class HeaderConstants {
/**
* The Accept header.
*/
public static final String ACCEPT = "Accept";
/**
         * The Accept-Charset header.
*/
public static final String ACCEPT_CHARSET = "Accept-Charset";
/**
* The Authorization header.
*/
public static final String AUTHORIZATION = "Authorization";
/**
* The format string for specifying ranges with only begin offset.
*/
public static final String BEGIN_RANGE_HEADER_FORMAT = "bytes=%d-";
/**
* The format string for specifying the blob append offset.
*/
public static final String BLOB_APPEND_OFFSET = PREFIX_FOR_STORAGE_HEADER + "blob-append-offset";
/**
* The header that specifies committed block count.
*/
public static final String BLOB_COMMITTED_BLOCK_COUNT = PREFIX_FOR_STORAGE_HEADER + "blob-committed-block-count";
/**
* The header that specifies blob sequence number.
*/
public static final String BLOB_SEQUENCE_NUMBER = PREFIX_FOR_STORAGE_HEADER + "blob-sequence-number";
/**
* The CacheControl header.
*/
public static final String CACHE_CONTROL = "Cache-Control";
/**
* The header that specifies blob caching control.
*/
public static final String CACHE_CONTROL_HEADER = PREFIX_FOR_STORAGE_HEADER + "blob-cache-control";
/**
* The header that indicates the client request ID.
*/
public static final String CLIENT_REQUEST_ID_HEADER = PREFIX_FOR_STORAGE_HEADER + "client-request-id";
/**
* The ContentDisposition header.
*/
public static final String CONTENT_DISPOSITION = "Content-Disposition";
/**
* The ContentEncoding header.
*/
public static final String CONTENT_ENCODING = "Content-Encoding";
/**
         * The ContentLanguage header.
*/
public static final String CONTENT_LANGUAGE = "Content-Language";
/**
* The ContentLength header.
*/
public static final String CONTENT_LENGTH = "Content-Length";
/**
* The ContentMD5 header.
*/
public static final String CONTENT_MD5 = "Content-MD5";
/**
* The ContentRange header.
*/
public static final String CONTENT_RANGE = "Content-Range";
/**
* The ContentType header.
*/
public static final String CONTENT_TYPE = "Content-Type";
/**
* The value of the copy action header that signifies an abort operation.
*/
public static final String COPY_ACTION_ABORT = "abort";
/**
* Header that specifies the copy action.
*/
public static final String COPY_ACTION_HEADER = PREFIX_FOR_STORAGE_HEADER + "copy-action";
/**
* The header that specifies copy completion time.
*/
public static final String COPY_COMPLETION_TIME = PREFIX_FOR_STORAGE_HEADER + "copy-completion-time";
/**
* The header that specifies copy id.
*/
public static final String COPY_ID = PREFIX_FOR_STORAGE_HEADER + "copy-id";
/**
* The header that specifies copy progress.
*/
public static final String COPY_PROGRESS = PREFIX_FOR_STORAGE_HEADER + "copy-progress";
/**
* The header that specifies copy source.
*/
public static final String COPY_SOURCE = PREFIX_FOR_STORAGE_HEADER + "copy-source";
/**
* The header for copy source.
*/
public static final String COPY_SOURCE_HEADER = PREFIX_FOR_STORAGE_HEADER + "copy-source";
/**
* The header that specifies copy status.
*/
public static final String COPY_STATUS = PREFIX_FOR_STORAGE_HEADER + "copy-status";
/**
* The header that specifies copy status description.
*/
public static final String COPY_STATUS_DESCRIPTION = PREFIX_FOR_STORAGE_HEADER + "copy-status-description";
/**
* The header that specifies the date.
*/
public static final String DATE = PREFIX_FOR_STORAGE_HEADER + "date";
/**
* The header to delete snapshots.
*/
public static final String DELETE_SNAPSHOT_HEADER = PREFIX_FOR_STORAGE_HEADER + "delete-snapshots";
/**
* The ETag header.
*/
public static final String ETAG = "ETag";
/**
* An unused HTTP code used internally to indicate a non-http related failure when constructing
* {@link StorageException} objects
*/
public static final int HTTP_UNUSED_306 = 306;
/**
* The blob append position equal header.
*/
public static final String IF_APPEND_POSITION_EQUAL_HEADER = PREFIX_FOR_STORAGE_HEADER + "blob-condition-appendpos";
/**
* The IfMatch header.
*/
public static final String IF_MATCH = "If-Match";
/**
* The blob maxsize condition header.
*/
public static final String IF_MAX_SIZE_LESS_THAN_OR_EQUAL = PREFIX_FOR_STORAGE_HEADER + "blob-condition-maxsize";
/**
* The IfModifiedSince header.
*/
public static final String IF_MODIFIED_SINCE = "If-Modified-Since";
/**
* The IfNoneMatch header.
*/
public static final String IF_NONE_MATCH = "If-None-Match";
/**
* The IfUnmodifiedSince header.
*/
public static final String IF_UNMODIFIED_SINCE = "If-Unmodified-Since";
/**
* The blob sequence number less than or equal condition header.
*/
public static final String IF_SEQUENCE_NUMBER_LESS_THAN_OR_EQUAL = PREFIX_FOR_STORAGE_HEADER + "if-sequence-number-le";
/**
* The blob sequence number less than condition header.
*/
public static final String IF_SEQUENCE_NUMBER_LESS_THAN = PREFIX_FOR_STORAGE_HEADER + "if-sequence-number-lt";
/**
* The blob sequence number equal condition header.
*/
public static final String IF_SEQUENCE_NUMBER_EQUAL = PREFIX_FOR_STORAGE_HEADER + "if-sequence-number-eq";
/**
* The header that specifies the lease action to perform
*/
public static final String LEASE_ACTION_HEADER = PREFIX_FOR_STORAGE_HEADER + "lease-action";
/**
* The header that specifies the break period of a lease
*/
public static final String LEASE_BREAK_PERIOD_HEADER = PREFIX_FOR_STORAGE_HEADER + "lease-break-period";
/**
* The header that specifies lease duration.
*/
public static final String LEASE_DURATION = PREFIX_FOR_STORAGE_HEADER + "lease-duration";
/**
* The header that specifies lease ID.
*/
public static final String LEASE_ID_HEADER = PREFIX_FOR_STORAGE_HEADER + "lease-id";
/**
* The header that specifies lease state.
*/
public static final String LEASE_STATE = PREFIX_FOR_STORAGE_HEADER + "lease-state";
/**
* The header that specifies lease status.
*/
public static final String LEASE_STATUS = PREFIX_FOR_STORAGE_HEADER + "lease-status";
/**
* The header that specifies the remaining lease time
*/
public static final String LEASE_TIME_HEADER = PREFIX_FOR_STORAGE_HEADER + "lease-time";
/**
* The header that specifies the pop receipt.
*/
public static final String POP_RECEIPT_HEADER = PREFIX_FOR_STORAGE_HEADER + "popreceipt";
/**
* The header prefix for metadata.
*/
public static final String PREFIX_FOR_STORAGE_METADATA = "x-ms-meta-";
/**
* The header prefix for properties.
*/
public static final String PREFIX_FOR_STORAGE_PROPERTIES = "x-ms-prop-";
/**
* The header that specifies the proposed lease ID for a leasing operation
*/
public static final String PROPOSED_LEASE_ID_HEADER = PREFIX_FOR_STORAGE_HEADER + "proposed-lease-id";
/**
* The Range header.
*/
public static final String RANGE = "Range";
/**
* The header that specifies if the request will populate the ContentMD5 header for range gets.
*/
public static final String RANGE_GET_CONTENT_MD5 = PREFIX_FOR_STORAGE_HEADER + "range-get-content-md5";
/**
* The format string for specifying ranges.
*/
public static final String RANGE_HEADER_FORMAT = "bytes=%d-%d";
/**
* The header that indicates the request ID.
*/
public static final String REQUEST_ID_HEADER = PREFIX_FOR_STORAGE_HEADER + "request-id";
/**
* The header field value received that indicates which server was accessed
*/
public static final String SERVER = "Server";
/**
* The header that specifies whether a resource is fully encrypted server-side
*/
public static final String SERVER_ENCRYPTED = PREFIX_FOR_STORAGE_HEADER + "server-encrypted";
/**
* The header that acknowledges data used for a write operation is encrypted server-side
*/
public static final String SERVER_REQUEST_ENCRYPTED = PREFIX_FOR_STORAGE_HEADER + "request-server-encrypted";
/**
* The header that specifies the snapshot ID.
*/
public static final String SNAPSHOT_ID_HEADER = PREFIX_FOR_STORAGE_HEADER + "snapshot";
/**
* The header for the If-Match condition.
*/
public static final String SOURCE_IF_MATCH_HEADER = PREFIX_FOR_STORAGE_HEADER + "source-if-match";
/**
* The header for the If-Modified-Since condition.
*/
public static final String SOURCE_IF_MODIFIED_SINCE_HEADER = PREFIX_FOR_STORAGE_HEADER
+ "source-if-modified-since";
/**
* The header for the If-None-Match condition.
*/
public static final String SOURCE_IF_NONE_MATCH_HEADER = PREFIX_FOR_STORAGE_HEADER + "source-if-none-match";
/**
* The header for the If-Unmodified-Since condition.
*/
public static final String SOURCE_IF_UNMODIFIED_SINCE_HEADER = PREFIX_FOR_STORAGE_HEADER
+ "source-if-unmodified-since";
/**
* The header for the source lease id.
*/
public static final String SOURCE_LEASE_ID_HEADER = PREFIX_FOR_STORAGE_HEADER + "source-lease-id";
/**
* The header for data ranges.
*/
public static final String STORAGE_RANGE_HEADER = PREFIX_FOR_STORAGE_HEADER + "range";
/**
* The header for storage version.
*/
public static final String STORAGE_VERSION_HEADER = PREFIX_FOR_STORAGE_HEADER + "version";
/**
* The current storage version header value.
*/
public static final String TARGET_STORAGE_VERSION = "2015-12-11";
/**
* The header that specifies the next visible time for a queue message.
*/
public static final String TIME_NEXT_VISIBLE_HEADER = PREFIX_FOR_STORAGE_HEADER + "time-next-visible";
/**
* The UserAgent header.
*/
public static final String USER_AGENT = "User-Agent";
/**
* Specifies the value to use for UserAgent header.
*/
public static final String USER_AGENT_PREFIX = "Azure-Storage";
/**
         * Specifies the version value to use for the UserAgent header.
*/
public static final String USER_AGENT_VERSION = "4.3.0";
/**
* The default type for content-type and accept
*/
public static final String XML_TYPE = "application/xml";
}
/**
 * Defines constants for use with query strings.
 * <p>
 * Each value is the literal query-parameter name (or value) appended to
 * Azure Storage request URIs, e.g. {@code ?restype=container&comp=list}.
 * "SAS" below refers to Shared Access Signature parameters.
 */
public static class QueryConstants {
    /**
     * The query component for the api version.
     */
    public static final String API_VERSION = "api-version";

    /**
     * Query component for SAS (Shared Access Signature) cache control.
     */
    public static final String CACHE_CONTROL = "rscc";

    /**
     * Query component for SAS content type.
     */
    public static final String CONTENT_TYPE = "rsct";

    /**
     * Query component for SAS content encoding.
     */
    public static final String CONTENT_ENCODING = "rsce";

    /**
     * Query component for SAS content language.
     */
    public static final String CONTENT_LANGUAGE = "rscl";

    /**
     * Query component for SAS content disposition.
     */
    public static final String CONTENT_DISPOSITION = "rscd";

    /**
     * Query component for the operation (component) to access.
     */
    public static final String COMPONENT = "comp";

    /**
     * Query component for copy.
     */
    public static final String COPY = "copy";

    /**
     * Query component for the copy ID.
     */
    public static final String COPY_ID = "copyid";

    /**
     * The query component for the SAS end partition key.
     */
    public static final String END_PARTITION_KEY = "epk";

    /**
     * The query component for the SAS end row key.
     */
    public static final String END_ROW_KEY = "erk";

    /**
     * Query component value for list.
     */
    public static final String LIST = "list";

    /**
     * Query component value for properties.
     */
    public static final String PROPERTIES = "properties";

    /**
     * Query component for resource type.
     */
    public static final String RESOURCETYPE = "restype";

    /**
     * The query component for the SAS table name.
     */
    public static final String SAS_TABLE_NAME = "tn";

    /**
     * The query component for the SAS signature.
     */
    public static final String SIGNATURE = "sig";

    /**
     * The query component for the signed SAS expiry time.
     */
    public static final String SIGNED_EXPIRY = "se";

    /**
     * The query component for the signed SAS identifier.
     */
    public static final String SIGNED_IDENTIFIER = "si";

    /**
     * The query component for the signed SAS IP address.
     */
    public static final String SIGNED_IP = "sip";

    /**
     * The query component for the signing SAS key.
     */
    public static final String SIGNED_KEY = "sk";

    /**
     * The query component for the signed SAS permissions.
     */
    public static final String SIGNED_PERMISSIONS = "sp";

    /**
     * The query component for the signed SAS Internet protocols.
     */
    public static final String SIGNED_PROTOCOLS = "spr";

    /**
     * The query component for the signed SAS resource.
     */
    public static final String SIGNED_RESOURCE = "sr";

    /**
     * The query component for the signed SAS resource type.
     */
    public static final String SIGNED_RESOURCE_TYPE = "srt";

    /**
     * The query component for the signed SAS service.
     */
    public static final String SIGNED_SERVICE = "ss";

    /**
     * The query component for the signed SAS start time.
     */
    public static final String SIGNED_START = "st";

    /**
     * The query component for the signed SAS version.
     */
    public static final String SIGNED_VERSION = "sv";

    /**
     * The query component for snapshot time.
     */
    public static final String SNAPSHOT = "snapshot";

    /**
     * The query component for the SAS start partition key.
     */
    public static final String START_PARTITION_KEY = "spk";

    /**
     * The query component for the SAS start row key.
     */
    public static final String START_ROW_KEY = "srk";

    /**
     * The query component for stats.
     */
    public static final String STATS = "stats";

    /**
     * The query component for delimiter.
     */
    public static final String DELIMITER = "delimiter";

    /**
     * The query component for include.
     */
    public static final String INCLUDE = "include";

    /**
     * The query component for marker.
     */
    public static final String MARKER = "marker";

    /**
     * The query component for max results.
     */
    public static final String MAX_RESULTS = "maxresults";

    /**
     * The query component for metadata.
     */
    public static final String METADATA = "metadata";

    /**
     * The query component for prefix.
     */
    public static final String PREFIX = "prefix";

    /**
     * The query component for acl.
     */
    public static final String ACL = "acl";
}
/**
 * The master Microsoft Azure Storage header prefix.
 */
public static final String PREFIX_FOR_STORAGE_HEADER = "x-ms-";

/**
 * Constant representing a kilobyte (Non-SI version).
 */
public static final int KB = 1024;

/**
 * Constant representing a megabyte (Non-SI version).
 */
public static final int MB = 1024 * KB;

/**
 * Constant representing a gigabyte (Non-SI version).
 */
public static final int GB = 1024 * MB;

/**
 * XML element for an access policy.
 */
public static final String ACCESS_POLICY = "AccessPolicy";

/**
 * Buffer width, in bytes, used to copy data to output streams.
 */
public static final int BUFFER_COPY_LENGTH = 8 * KB;

/**
 * XML element for the copy completion time.
 */
public static final String COPY_COMPLETION_TIME_ELEMENT = "CopyCompletionTime";

/**
 * XML element for the copy id.
 */
public static final String COPY_ID_ELEMENT = "CopyId";

/**
 * XML element for the copy progress.
 */
public static final String COPY_PROGRESS_ELEMENT = "CopyProgress";

/**
 * XML element for the copy source.
 */
public static final String COPY_SOURCE_ELEMENT = "CopySource";

/**
 * XML element for the copy status description.
 */
public static final String COPY_STATUS_DESCRIPTION_ELEMENT = "CopyStatusDescription";

/**
 * XML element for the copy status.
 */
public static final String COPY_STATUS_ELEMENT = "CopyStatus";

/**
 * Default read timeout, in milliseconds: 5 min * 60 seconds * 1000 ms.
 */
public static final int DEFAULT_READ_TIMEOUT = 5 * 60 * 1000;

/**
 * XML element for delimiters.
 */
public static final String DELIMITER_ELEMENT = "Delimiter";

/**
 * Http GET method.
 */
public static final String HTTP_GET = "GET";

/**
 * Http PUT method.
 */
public static final String HTTP_PUT = "PUT";

/**
 * Http DELETE method.
 */
public static final String HTTP_DELETE = "DELETE";

/**
 * Http HEAD method.
 */
public static final String HTTP_HEAD = "HEAD";

/**
 * Http POST method.
 */
public static final String HTTP_POST = "POST";

/**
 * An empty <code>String</code> to use for comparison.
 */
public static final String EMPTY_STRING = "";

/**
 * XML element for page range end elements.
 */
public static final String END_ELEMENT = "End";

/**
 * XML element for error codes.
 */
public static final String ERROR_CODE = "Code";

/**
 * XML element for exception details.
 */
public static final String ERROR_EXCEPTION = "ExceptionDetails";

/**
 * XML element for exception messages.
 */
public static final String ERROR_EXCEPTION_MESSAGE = "ExceptionMessage";

/**
 * XML element for stack traces.
 */
public static final String ERROR_EXCEPTION_STACK_TRACE = "StackTrace";

/**
 * XML element for error messages.
 */
public static final String ERROR_MESSAGE = "Message";

/**
 * XML root element for errors.
 */
public static final String ERROR_ROOT_ELEMENT = "Error";

/**
 * XML element for the ETag.
 */
public static final String ETAG_ELEMENT = "Etag";

/**
 * XML element for the end time of an access policy.
 */
public static final String EXPIRY = "Expiry";

/**
 * Constant for False.
 */
public static final String FALSE = "false";

/**
 * Constant for bootstrap geo-replication status.
 */
public static final String GEO_BOOTSTRAP_VALUE = "bootstrap";

/**
 * Constant for live geo-replication status.
 */
public static final String GEO_LIVE_VALUE = "live";

/**
 * Constant for unavailable geo-replication status.
 */
public static final String GEO_UNAVAILABLE_VALUE = "unavailable";

/**
 * Specifies HTTP.
 */
public static final String HTTP = "http";

/**
 * Specifies HTTPS.
 */
public static final String HTTPS = "https";

/**
 * Specifies both HTTPS and HTTP.
 */
public static final String HTTPS_HTTP = "https,http";

/**
 * XML attribute for IDs.
 */
public static final String ID = "Id";

/**
 * XML element for an invalid metadata name.
 */
public static final String INVALID_METADATA_NAME = "x-ms-invalid-name";

/**
 * XML element for the last modified date.
 */
public static final String LAST_MODIFIED_ELEMENT = "Last-Modified";

/**
 * Lease break period max in seconds.
 */
public static final int LEASE_BREAK_PERIOD_MAX = 60;

/**
 * Lease break period min in seconds.
 */
public static final int LEASE_BREAK_PERIOD_MIN = 0;

/**
 * XML element for the lease duration.
 */
public static final String LEASE_DURATION_ELEMENT = "LeaseDuration";

/**
 * Lease duration max in seconds.
 */
public static final int LEASE_DURATION_MAX = 60;

/**
 * Lease duration min in seconds.
 */
public static final int LEASE_DURATION_MIN = 15;

/**
 * XML element for the lease state.
 */
public static final String LEASE_STATE_ELEMENT = "LeaseState";

/**
 * XML element for the lease status.
 */
public static final String LEASE_STATUS_ELEMENT = "LeaseStatus";

/**
 * Constant signaling the resource is locked.
 */
public static final String LOCKED_VALUE = "Locked";

/**
 * XML element for a marker.
 */
public static final String MARKER_ELEMENT = "Marker";

/**
 * The maximum size of a single block.
 * NOTE(review): non-final, so publicly mutable; DEFAULT_STREAM_WRITE_IN_BYTES and
 * DEFAULT_MINIMUM_READ_SIZE_IN_BYTES below capture its value at class-initialization
 * time and will NOT track later mutation — confirm the mutability is intentional.
 */
public static int MAX_BLOCK_SIZE = 4 * MB;

/**
 * The default write size, in bytes, used by {@link BlobOutputStream} or {@link FileOutputStream}.
 */
public static final int DEFAULT_STREAM_WRITE_IN_BYTES = Constants.MAX_BLOCK_SIZE;

/**
 * The default minimum read size, in bytes, for a {@link BlobInputStream} or {@link FileInputStream}.
 */
public static final int DEFAULT_MINIMUM_READ_SIZE_IN_BYTES = Constants.MAX_BLOCK_SIZE;

/**
 * The maximum size, in bytes, of a given stream mark operation.
 */
// Note if BlobConstants.MAX_SINGLE_UPLOAD_BLOB_SIZE_IN_BYTES is updated then this needs to be as well.
public static final int MAX_MARK_LENGTH = 64 * MB;

/**
 * XML element for maximum results.
 */
public static final String MAX_RESULTS_ELEMENT = "MaxResults";

/**
 * Maximum number of shared access policy identifiers supported by server.
 */
public static final int MAX_SHARED_ACCESS_POLICY_IDENTIFIERS = 5;

/**
 * Maximum number of results returned per segment by listing operations.
 * NOTE(review): the original doc ("Number of default concurrent requests for
 * parallel operation") looks like a copy-paste error for this value of 5000 — confirm.
 */
public static final int MAXIMUM_SEGMENTED_RESULTS = 5000;

/**
 * XML element for the metadata.
 */
public static final String METADATA_ELEMENT = "Metadata";

/**
 * XML element for names.
 */
public static final String NAME_ELEMENT = "Name";

/**
 * XML element for the next marker.
 */
public static final String NEXT_MARKER_ELEMENT = "NextMarker";

/**
 * The size of a page, in bytes, in a page blob.
 */
public static final int PAGE_SIZE = 512;

/**
 * XML element for the permission of an access policy.
 */
public static final String PERMISSION = "Permission";

/**
 * XML element for a prefix.
 */
public static final String PREFIX_ELEMENT = "Prefix";

/**
 * XML element for properties.
 */
public static final String PROPERTIES = "Properties";

/**
 * XML element for the server encryption status.
 */
public static final String SERVER_ENCRYPTION_STATUS_ELEMENT = "ServerEncrypted";

/**
 * XML element for a signed identifier.
 */
public static final String SIGNED_IDENTIFIER_ELEMENT = "SignedIdentifier";

/**
 * XML element for signed identifiers.
 */
public static final String SIGNED_IDENTIFIERS_ELEMENT = "SignedIdentifiers";

/**
 * XML element for the start time of an access policy.
 */
public static final String START = "Start";

/**
 * Constant for True.
 */
public static final String TRUE = "true";

/**
 * Constant signaling the resource is unlocked.
 */
public static final String UNLOCKED_VALUE = "Unlocked";

/**
 * Constant signaling the resource lease duration, state or status is unspecified.
 */
public static final String UNSPECIFIED_VALUE = "Unspecified";

/**
 * XML element for the URL.
 */
public static final String URL_ELEMENT = "Url";

/**
 * The name of the UTF-8 charset, for byte/String conversions.
 * NOTE(review): the original doc ("The default type for content-type and accept")
 * appears to be a copy-paste of the XML_TYPE description.
 */
public static final String UTF8_CHARSET = "UTF-8";

/**
 * Private default constructor: this class is a constants holder and must not be instantiated.
 */
private Constants() {
    // No op
}
}
| |
package com.wincom.mstar.domain;
import java.util.ArrayList;
import java.util.List;
public class CLogObjUIExample {

    /** Optional ORDER BY clause for generated queries (table nfjd502.dbo.CLogObjUI). */
    protected String orderByClause;

    /** Whether the generated SELECT should be DISTINCT. */
    protected boolean distinct;

    /** Criteria groups; groups are OR'ed together, conditions within a group are AND'ed. */
    protected List<Criteria> oredCriteria;

    /** Creates an example with no criteria groups, no ORDER BY and DISTINCT off. */
    public CLogObjUIExample() {
        this.oredCriteria = new ArrayList<Criteria>();
    }

    /** Sets the ORDER BY clause to append to generated queries. */
    public void setOrderByClause(String orderByClause) {
        this.orderByClause = orderByClause;
    }

    /** Returns the ORDER BY clause, or {@code null} if none was set. */
    public String getOrderByClause() {
        return this.orderByClause;
    }

    /** Sets whether the generated query uses SELECT DISTINCT. */
    public void setDistinct(boolean distinct) {
        this.distinct = distinct;
    }

    /** Returns {@code true} if the generated query uses SELECT DISTINCT. */
    public boolean isDistinct() {
        return this.distinct;
    }

    /** Returns the live (mutable) list of OR'ed criteria groups. */
    public List<Criteria> getOredCriteria() {
        return this.oredCriteria;
    }

    /** Registers an existing criteria group, OR'ed with any previously added groups. */
    public void or(Criteria criteria) {
        this.oredCriteria.add(criteria);
    }

    /** Creates, registers and returns a new criteria group OR'ed with previous groups. */
    public Criteria or() {
        Criteria next = createCriteriaInternal();
        this.oredCriteria.add(next);
        return next;
    }

    /**
     * Creates a new criteria group and returns it. The group is auto-registered
     * only when it is the first one; later groups must be added via {@link #or(Criteria)}.
     */
    public Criteria createCriteria() {
        Criteria created = createCriteriaInternal();
        if (this.oredCriteria.isEmpty()) {
            this.oredCriteria.add(created);
        }
        return created;
    }

    /** Factory hook for subclasses; builds an empty criteria group. */
    protected Criteria createCriteriaInternal() {
        return new Criteria();
    }

    /** Resets this example to its freshly-constructed state. */
    public void clear() {
        this.oredCriteria.clear();
        this.orderByClause = null;
        this.distinct = false;
    }

    /**
     * Base class holding the AND'ed conditions of one criteria group for
     * table nfjd502.dbo.CLogObjUI.
     */
    protected abstract static class GeneratedCriteria {

        /** Conditions in this group, combined with AND when rendered to SQL. */
        protected List<Criterion> criteria;

        protected GeneratedCriteria() {
            super();
            this.criteria = new ArrayList<Criterion>();
        }

        /** A group is valid once it contains at least one condition. */
        public boolean isValid() {
            return !this.criteria.isEmpty();
        }

        /** Returns every condition in this group. */
        public List<Criterion> getAllCriteria() {
            return this.criteria;
        }

        /** Returns the conditions of this group. */
        public List<Criterion> getCriteria() {
            return this.criteria;
        }

        /** Adds a no-value condition such as {@code "col is null"}. */
        protected void addCriterion(String condition) {
            if (condition == null) {
                throw new RuntimeException("Value for condition cannot be null");
            }
            this.criteria.add(new Criterion(condition));
        }

        /** Adds a single-value condition such as {@code "col ="}. */
        protected void addCriterion(String condition, Object value, String property) {
            if (value == null) {
                throw new RuntimeException("Value for " + property + " cannot be null");
            }
            this.criteria.add(new Criterion(condition, value));
        }

        /** Adds a two-value condition such as {@code "col between"}. */
        protected void addCriterion(String condition, Object value1, Object value2, String property) {
            if (value1 == null || value2 == null) {
                throw new RuntimeException("Between values for " + property + " cannot be null");
            }
            this.criteria.add(new Criterion(condition, value1, value2));
        }

        public Criteria andLogobjidIsNull() {
            addCriterion("LogObjID is null");
            return (Criteria) this;
        }

        public Criteria andLogobjidIsNotNull() {
            addCriterion("LogObjID is not null");
            return (Criteria) this;
        }

        public Criteria andLogobjidEqualTo(Integer value) {
            addCriterion("LogObjID =", value, "logobjid");
            return (Criteria) this;
        }

        public Criteria andLogobjidNotEqualTo(Integer value) {
            addCriterion("LogObjID <>", value, "logobjid");
            return (Criteria) this;
        }

        public Criteria andLogobjidGreaterThan(Integer value) {
            addCriterion("LogObjID >", value, "logobjid");
            return (Criteria) this;
        }

        public Criteria andLogobjidGreaterThanOrEqualTo(Integer value) {
            addCriterion("LogObjID >=", value, "logobjid");
            return (Criteria) this;
        }

        public Criteria andLogobjidLessThan(Integer value) {
            addCriterion("LogObjID <", value, "logobjid");
            return (Criteria) this;
        }

        public Criteria andLogobjidLessThanOrEqualTo(Integer value) {
            addCriterion("LogObjID <=", value, "logobjid");
            return (Criteria) this;
        }

        public Criteria andLogobjidIn(List<Integer> values) {
            addCriterion("LogObjID in", values, "logobjid");
            return (Criteria) this;
        }

        public Criteria andLogobjidNotIn(List<Integer> values) {
            addCriterion("LogObjID not in", values, "logobjid");
            return (Criteria) this;
        }

        public Criteria andLogobjidBetween(Integer value1, Integer value2) {
            addCriterion("LogObjID between", value1, value2, "logobjid");
            return (Criteria) this;
        }

        public Criteria andLogobjidNotBetween(Integer value1, Integer value2) {
            addCriterion("LogObjID not between", value1, value2, "logobjid");
            return (Criteria) this;
        }
    }

    /**
     * Concrete criteria group; kept separate from GeneratedCriteria so hand-written
     * additions survive MyBatis Generator merges (do_not_delete_during_merge).
     */
    public static class Criteria extends GeneratedCriteria {

        protected Criteria() {
            super();
        }
    }

    /** One rendered SQL condition: a fragment plus zero, one or two bound values. */
    public static class Criterion {

        private String condition;
        private Object value;
        private Object secondValue;
        private boolean noValue;
        private boolean singleValue;
        private boolean betweenValue;
        private boolean listValue;
        private String typeHandler;

        public String getCondition() {
            return this.condition;
        }

        public Object getValue() {
            return this.value;
        }

        public Object getSecondValue() {
            return this.secondValue;
        }

        public boolean isNoValue() {
            return this.noValue;
        }

        public boolean isSingleValue() {
            return this.singleValue;
        }

        public boolean isBetweenValue() {
            return this.betweenValue;
        }

        public boolean isListValue() {
            return this.listValue;
        }

        public String getTypeHandler() {
            return this.typeHandler;
        }

        /** Condition with no bound value, e.g. "col is null". */
        protected Criterion(String condition) {
            super();
            this.condition = condition;
            this.typeHandler = null;
            this.noValue = true;
        }

        /** Condition with one bound value; a List value marks an IN-style condition. */
        protected Criterion(String condition, Object value, String typeHandler) {
            super();
            this.condition = condition;
            this.value = value;
            this.typeHandler = typeHandler;
            this.listValue = value instanceof List<?>;
            this.singleValue = !this.listValue;
        }

        protected Criterion(String condition, Object value) {
            this(condition, value, null);
        }

        /** Condition with two bound values, e.g. "col between". */
        protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
            super();
            this.condition = condition;
            this.value = value;
            this.secondValue = secondValue;
            this.typeHandler = typeHandler;
            this.betweenValue = true;
        }

        protected Criterion(String condition, Object value, Object secondValue) {
            this(condition, value, secondValue, null);
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.File;
import java.io.FileFilter;
import java.io.IOError;
import java.io.IOException;
import java.util.*;
import org.apache.commons.lang.StringUtils;
import com.google.common.collect.ImmutableMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.config.*;
import org.apache.cassandra.db.compaction.LeveledManifest;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.io.util.MmappedSegmentedFile;
import org.apache.cassandra.io.sstable.*;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.utils.CLibrary;
import org.apache.cassandra.utils.Pair;
/**
* Encapsulate handling of paths to the data files.
*
* The directory layout is the following:
* /<path_to_data_dir>/ks/cf1/ks-cf1-hb-1-Data.db
* /cf2/ks-cf2-hb-1-Data.db
* ...
*
* In addition, more than one 'root' data directory can be specified so that
* <path_to_data_dir> potentially represents multiple locations.
* Note that in the case of multiple locations, the manifest for the leveled
* compaction is only in one of the locations.
*
* Snapshots (resp. backups) are always created alongside the sstables that are
* snapshotted (resp. backed up), but inside a subdirectory named 'snapshots'
* (resp. 'backups') (and snapshots are further inside a subdirectory named after
* the snapshot).
*
* This class abstracts all those details from the rest of the code.
*/
public class Directories
{
    private static final Logger logger = LoggerFactory.getLogger(Directories.class);

    public static final String BACKUPS_SUBDIR = "backups";
    public static final String SNAPSHOT_SUBDIR = "snapshots";
    public static final char SECONDARY_INDEX_NAME_SEPARATOR = '.';

    // One entry per configured data file location; sstables of a CF may live in any of them.
    public static final File[] dataFileLocations;

    static
    {
        String[] locations = DatabaseDescriptor.getAllDataFileLocations();
        dataFileLocations = new File[locations.length];
        for (int i = 0; i < locations.length; ++i)
            dataFileLocations[i] = new File(locations[i]);
    }

    private final String tablename;
    private final String cfname;
    private final File[] sstableDirectories;

    /**
     * Returns the Directories for the given keyspace/column family, creating the
     * on-disk directories if needed (unless running in client mode).
     * A secondary index (cfname containing {@link #SECONDARY_INDEX_NAME_SEPARATOR})
     * shares the directory of its base column family.
     */
    public static Directories create(String tablename, String cfname)
    {
        int idx = cfname.indexOf(SECONDARY_INDEX_NAME_SEPARATOR);
        if (idx > 0)
            // secondary index, goes in the same directory as the base cf
            return new Directories(tablename, cfname, cfname.substring(0, idx));
        else
            return new Directories(tablename, cfname, cfname);
    }

    private Directories(String tablename, String cfname, String directoryName)
    {
        this.tablename = tablename;
        this.cfname = cfname;
        this.sstableDirectories = new File[dataFileLocations.length];
        for (int i = 0; i < dataFileLocations.length; ++i)
            sstableDirectories[i] = new File(dataFileLocations[i], join(tablename, directoryName));

        // Client-mode tools must not touch the local filesystem.
        if (!StorageService.instance.isClientMode())
        {
            try
            {
                for (File dir : sstableDirectories)
                    FileUtils.createDirectory(dir);
            }
            catch (IOException e)
            {
                throw new IOError(e);
            }
        }
    }

    /**
     * Returns the data directory with the most usable space for a new sstable of
     * {@code estimatedSize} bytes, or null if no location has enough room.
     * When mmap is in use without a buffer cleaner, a failed first attempt triggers
     * a GC request (to unmap compacted sstables so they can be deleted) and one retry.
     */
    public File getDirectoryForNewSSTables(long estimatedSize)
    {
        File path = getLocationWithMaximumAvailableSpace(estimatedSize);

        // Requesting GC has a chance to free space only if we're using mmap and a non SUN jvm
        if (path == null
            && (DatabaseDescriptor.getDiskAccessMode() == Config.DiskAccessMode.mmap || DatabaseDescriptor.getIndexAccessMode() == Config.DiskAccessMode.mmap)
            && !MmappedSegmentedFile.isCleanerAvailable())
        {
            StorageService.instance.requestGC();
            // retry after GCing has forced unmap of compacted SSTables so they can be deleted
            // Note: GCInspector will do this already, but only sun JVM supports GCInspector so far
            SSTableDeletingTask.rescheduleFailedTasks();
            try
            {
                Thread.sleep(10000);
            }
            catch (InterruptedException e)
            {
                throw new AssertionError(e);
            }
            path = getLocationWithMaximumAvailableSpace(estimatedSize);
        }
        return path;
    }

    /*
     * Loop through all the disks to see which disk has the max free space
     * return the disk with max free space for compactions. If the size of the expected
     * compacted file is greater than the max disk space available return null, we cannot
     * do compaction in this case.
     */
    public File getLocationWithMaximumAvailableSpace(long estimatedSize)
    {
        long maxFreeDisk = 0;
        File maxLocation = null;

        for (File dir : sstableDirectories)
        {
            if (maxFreeDisk < dir.getUsableSpace())
            {
                maxFreeDisk = dir.getUsableSpace();
                maxLocation = dir;
            }
        }
        logger.debug("expected data files size is {}; largest free partition has {} bytes free", estimatedSize, maxFreeDisk);

        // Load factor of 0.9 we do not want to use the entire disk that is too risky.
        maxFreeDisk = (long)(0.9 * maxFreeDisk);
        return estimatedSize < maxFreeDisk ? maxLocation : null;
    }

    /** Returns (creating if needed) the named snapshot directory next to the given sstable. */
    public static File getSnapshotDirectory(Descriptor desc, String snapshotName)
    {
        return getOrCreate(desc.directory, SNAPSHOT_SUBDIR, snapshotName);
    }

    /** Returns (creating if needed) the backups directory next to the given sstable. */
    public static File getBackupsDirectory(Descriptor desc)
    {
        return getOrCreate(desc.directory, BACKUPS_SUBDIR);
    }

    /** Creates a fresh lister over this CF's sstable files. */
    public SSTableLister sstableLister()
    {
        return new SSTableLister();
    }

    /**
     * Collects the sstable components of this CF across all data directories.
     * Configure with the skip/include setters before calling list() or listFiles();
     * the directories are scanned lazily and at most once per lister.
     */
    public class SSTableLister
    {
        private boolean skipCompacted;
        private boolean skipTemporary;
        private boolean includeBackups;
        private int nbFiles;
        private final Map<Descriptor, Set<Component>> components = new HashMap<Descriptor, Set<Component>>();
        private boolean filtered;

        public SSTableLister skipCompacted(boolean b)
        {
            if (filtered)
                throw new IllegalStateException("list() has already been called");
            skipCompacted = b;
            return this;
        }

        public SSTableLister skipTemporary(boolean b)
        {
            if (filtered)
                throw new IllegalStateException("list() has already been called");
            skipTemporary = b;
            return this;
        }

        public SSTableLister includeBackups(boolean b)
        {
            if (filtered)
                throw new IllegalStateException("list() has already been called");
            includeBackups = b;
            return this;
        }

        /** Returns an immutable snapshot of the components found, keyed by descriptor. */
        public Map<Descriptor, Set<Component>> list()
        {
            filter();
            return ImmutableMap.copyOf(components);
        }

        /** Returns every component as a File, one entry per component found. */
        public List<File> listFiles()
        {
            filter();
            List<File> l = new ArrayList<File>(nbFiles);
            for (Map.Entry<Descriptor, Set<Component>> entry : components.entrySet())
            {
                for (Component c : entry.getValue())
                {
                    l.add(new File(entry.getKey().filenameFor(c)));
                }
            }
            return l;
        }

        // Scans the data directories once, populating 'components' via the filter's side effect.
        private void filter()
        {
            if (filtered)
                return;

            for (File location : sstableDirectories)
            {
                location.listFiles(getFilter());
                if (includeBackups)
                    new File(location, BACKUPS_SUBDIR).listFiles(getFilter());
            }

            filtered = true;
        }

        private FileFilter getFilter()
        {
            // Note: the prefix needs to include cfname + separator to distinguish between a cfs and it's secondary indexes
            final String sstablePrefix = tablename + Component.separator + cfname + Component.separator;
            return new FileFilter()
            {
                // This function always return false since accepts adds to the components map
                public boolean accept(File file)
                {
                    // we are only interested in the SSTable files that belong to the specific ColumnFamily
                    if (file.isDirectory() || !file.getName().startsWith(sstablePrefix))
                        return false;

                    Pair<Descriptor, Component> pair = SSTable.tryComponentFromFilename(file.getParentFile(), file.getName());
                    if (pair == null)
                        return false;

                    if (skipCompacted && new File(pair.left.filenameFor(Component.COMPACTED_MARKER)).exists())
                        return false;
                    if (skipTemporary && pair.left.temporary)
                        return false;

                    Set<Component> previous = components.get(pair.left);
                    if (previous == null)
                    {
                        previous = new HashSet<Component>();
                        components.put(pair.left, previous);
                    }
                    previous.add(pair.right);
                    nbFiles++;
                    return false;
                }
            };
        }
    }

    /** Returns the leveled-compaction manifest file for this CF, or null if none exists. */
    public File tryGetLeveledManifest()
    {
        for (File dir : sstableDirectories)
        {
            File manifestFile = new File(dir, cfname + LeveledManifest.EXTENSION);
            if (manifestFile.exists())
            {
                logger.debug("Found manifest at {}", manifestFile);
                return manifestFile;
            }
        }
        logger.debug("No level manifest found");
        return null;
    }

    /**
     * Returns the existing manifest if any; otherwise a (not yet created) File in the
     * first data directory where a new manifest should be written.
     */
    public File getOrCreateLeveledManifest()
    {
        File manifestFile = tryGetLeveledManifest();
        if (manifestFile == null)
            manifestFile = new File(sstableDirectories[0], cfname + LeveledManifest.EXTENSION);

        return manifestFile;
    }

    /** Hard-links the leveled manifest (if present) into the named snapshot directory. */
    public void snapshotLeveledManifest(String snapshotName) throws IOException
    {
        File manifest = tryGetLeveledManifest();
        if (manifest != null)
        {
            File snapshotDirectory = getOrCreate(manifest.getParentFile(), SNAPSHOT_SUBDIR, snapshotName);
            CLibrary.createHardLink(manifest, new File(snapshotDirectory, manifest.getName()));
        }
    }

    /** Returns true if the named snapshot exists in any data directory of this CF. */
    public boolean snapshotExists(String snapshotName)
    {
        for (File dir : sstableDirectories)
        {
            File snapshotDir = new File(dir, join(SNAPSHOT_SUBDIR, snapshotName));
            if (snapshotDir.exists())
                return true;
        }
        return false;
    }

    /** Deletes the named snapshot in every data directory; a null/empty name deletes all snapshots. */
    public void clearSnapshot(String snapshotName) throws IOException
    {
        // If snapshotName is empty or null, we will delete the entire snapshot directory
        String tag = snapshotName == null ? "" : snapshotName;
        for (File dir : sstableDirectories)
        {
            File snapshotDir = new File(dir, join(SNAPSHOT_SUBDIR, tag));
            if (snapshotDir.exists())
            {
                logger.debug("Removing snapshot directory {}", snapshotDir);
                FileUtils.deleteRecursive(snapshotDir);
            }
        }
    }

    // Returns base/subdirs..., creating the directory if missing; fails if the path
    // exists but is not a directory.
    private static File getOrCreate(File base, String... subdirs)
    {
        File dir = subdirs == null || subdirs.length == 0 ? base : new File(base, join(subdirs));
        if (dir.exists())
        {
            if (!dir.isDirectory())
                throw new IOError(new IOException(String.format("Invalid directory path %s: path exists but is not a directory", dir)));
        }
        else if (!dir.mkdirs())
        {
            throw new IOError(new IOException("Unable to create directory " + dir));
        }
        return dir;
    }

    private static String join(String... s)
    {
        return StringUtils.join(s, File.separator);
    }

    /**
     * To check if sstables needs migration, we look at the System directory.
     * If it contains a directory for the status cf, we'll attempt a sstable
     * migration.
     * Note that it is mostly harmless to try a migration uselessly, except
     * maybe for some wasted cpu cycles.
     */
    public static boolean sstablesNeedsMigration()
    {
        if (StorageService.instance.isClientMode())
            return false;

        boolean hasSystemKeyspace = false;
        for (File location : dataFileLocations)
        {
            File systemDir = new File(location, Table.SYSTEM_TABLE);
            hasSystemKeyspace |= (systemDir.exists() && systemDir.isDirectory());
            File statusCFDir = new File(systemDir, SystemTable.STATUS_CF);
            if (statusCFDir.exists())
                return false;
        }

        if (!hasSystemKeyspace)
            // This is a brand new node.
            return false;

        // Check whether the migration might create too long a filename
        int longestLocation = -1;
        try
        {
            for (File loc : dataFileLocations)
                longestLocation = Math.max(longestLocation, loc.getCanonicalPath().length());
        }
        catch (IOException e)
        {
            throw new IOError(e);
        }

        for (KSMetaData ksm : Schema.instance.getTableDefinitions())
        {
            String ksname = ksm.name;
            for (Map.Entry<String, CFMetaData> entry : ksm.cfMetaData().entrySet())
            {
                String cfname = entry.getKey();

                // max path is roughly (guess-estimate) <location>/ksname/cfname/snapshots/1324314347102-somename/ksname-cfname-tmp-hb-1024-Statistics.db
                if (longestLocation + (ksname.length() + cfname.length()) * 2 + 62 > 256)
                    throw new RuntimeException("Starting with 1.1, keyspace names and column family names must be less than 32 characters long. "
                            + ksname + "/" + cfname + " doesn't respect that restriction. Please rename your keyspace/column families to respect that restriction before updating.");
            }
        }
        return true;
    }

    /**
     * Move sstables from the pre-#2749 layout to their new location/names.
     * This involves:
     *   - moving each sstable to their CF specific directory
     *   - rename the sstable to include the keyspace in the filename
     *
     * Note that this also move leveled manifests, snapshots and backups.
     */
    public static void migrateSSTables()
    {
        logger.info("Upgrade from pre-1.1 version detected: migrating sstables to new directory layout");

        for (File location : dataFileLocations)
        {
            if (!location.exists() || !location.isDirectory())
                continue;

            for (File ksDir : location.listFiles())
            {
                if (!ksDir.isDirectory())
                    continue;

                for (File file : ksDir.listFiles())
                    migrateFile(file, ksDir, null);

                migrateSnapshots(ksDir);
                migrateBackups(ksDir);
            }
        }
    }

    // Moves every file of every snapshot under ksDir to the new layout, then removes
    // the (now hopefully empty) old snapshot directories.
    private static void migrateSnapshots(File ksDir)
    {
        File snapshotDir = new File(ksDir, SNAPSHOT_SUBDIR);
        if (!snapshotDir.exists())
            return;

        for (File snapshot : snapshotDir.listFiles())
        {
            if (!snapshot.isDirectory())
                continue;

            for (File f : snapshot.listFiles())
                migrateFile(f, ksDir, join(SNAPSHOT_SUBDIR, snapshot.getName()));

            if (!snapshot.delete())
                logger.info("Old snapshot directory {} not deleted by migration as it is not empty", snapshot);
        }
        if (!snapshotDir.delete())
            logger.info("Old directory {} not deleted by migration as it is not empty", snapshotDir);
    }

    // Moves every backup file under ksDir to the new layout, then removes the old
    // backups directory if empty.
    private static void migrateBackups(File ksDir)
    {
        File backupDir = new File(ksDir, BACKUPS_SUBDIR);
        if (!backupDir.exists())
            return;

        for (File f : backupDir.listFiles())
            migrateFile(f, ksDir, BACKUPS_SUBDIR);

        if (!backupDir.delete())
            logger.info("Old directory {} not deleted by migration as it is not empty", backupDir);
    }

    // Moves one pre-1.1 file into its CF-specific directory, prefixing the keyspace
    // name to the filename (manifests keep their name unchanged).
    private static void migrateFile(File file, File ksDir, String additionalPath)
    {
        try
        {
            if (file.isDirectory())
                return;

            String name = file.getName();
            boolean isManifest = name.endsWith(LeveledManifest.EXTENSION);
            String cfname = isManifest
                          ? name.substring(0, name.length() - LeveledManifest.EXTENSION.length())
                          : name.substring(0, name.indexOf(Component.separator));

            int idx = cfname.indexOf(SECONDARY_INDEX_NAME_SEPARATOR); // idx > 0 => secondary index
            String dirname = idx > 0 ? cfname.substring(0, idx) : cfname;
            File destDir = getOrCreate(ksDir, dirname, additionalPath);

            File destFile = new File(destDir, isManifest ? name : ksDir.getName() + Component.separator + name);
            logger.debug("[upgrade to 1.1] Moving {} to {}", file, destFile);
            FileUtils.renameWithConfirm(file, destFile);
        }
        catch (IOException e)
        {
            throw new IOError(e);
        }
    }

    // Hack for tests, don't use otherwise
    static void overrideDataDirectoriesForTest(String loc)
    {
        for (int i = 0; i < dataFileLocations.length; ++i)
            dataFileLocations[i] = new File(loc);
    }

    // Hack for tests, don't use otherwise
    static void resetDataDirectoriesAfterTest()
    {
        String[] locations = DatabaseDescriptor.getAllDataFileLocations();
        for (int i = 0; i < locations.length; ++i)
            dataFileLocations[i] = new File(locations[i]);
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.spi;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
 * Defines a set of valid tuples according to the constraints on each of its constituent columns
 */
public final class TupleDomain
{
    // Shared singletons for the two degenerate domains.
    private static final TupleDomain NONE = new TupleDomain(null);
    private static final TupleDomain ALL = new TupleDomain(Collections.<ColumnHandle, Domain>emptyMap());

    /**
     * TupleDomain is internally represented as a normalized map of each column to its
     * respective allowable value Domain. Conceptually, these Domains can be thought of
     * as being AND'ed together to form the representative predicate.
     *
     * This map is normalized in the following ways:
     * 1) The map will not contain Domain.none() as any of its values. If any of the Domain
     * values are Domain.none(), then the whole map will instead be null. This enforces the fact that
     * any single Domain.none() value effectively turns this TupleDomain into "none" as well.
     * 2) The map will not contain Domain.all() as any of its values. Our convention here is that
     * any unmentioned column is equivalent to having Domain.all(). To normalize this structure,
     * we remove any Domain.all() values from the map.
     */
    private final Map<ColumnHandle, Domain> domains;

    private TupleDomain(Map<ColumnHandle, Domain> domains)
    {
        if (domains == null || containsNoneDomain(domains)) {
            // null is the canonical representation of "none" (see field doc above)
            this.domains = null;
        }
        else {
            this.domains = Collections.unmodifiableMap(normalizeAndCopy(domains));
        }
    }

    /**
     * Creates a TupleDomain from the given column constraints. The map is
     * defensively copied and normalized (see {@link #domains}).
     */
    public static TupleDomain withColumnDomains(Map<ColumnHandle, Domain> domains)
    {
        return new TupleDomain(Objects.requireNonNull(domains, "domains is null"));
    }

    /**
     * Returns the TupleDomain that no tuple satisfies.
     */
    public static TupleDomain none()
    {
        return NONE;
    }

    /**
     * Returns the TupleDomain that every tuple satisfies.
     */
    public static TupleDomain all()
    {
        return ALL;
    }

    /**
     * Convert a map of columns to values into the TupleDomain which requires
     * those columns to be fixed to those values.
     */
    public static TupleDomain withFixedValues(Map<ColumnHandle, Comparable<?>> fixedValues)
    {
        Map<ColumnHandle, Domain> domains = new HashMap<>();
        for (Map.Entry<ColumnHandle, Comparable<?>> entry : fixedValues.entrySet()) {
            domains.put(entry.getKey(), Domain.singleValue(entry.getValue()));
        }
        return withColumnDomains(domains);
    }

    @JsonCreator
    // Available for Jackson deserialization only!
    public static TupleDomain fromNullableColumnDomains(@JsonProperty("nullableColumnDomains") List<ColumnDomain> nullableColumnDomains)
    {
        // null mirrors getNullableColumnDomains(), which serializes "none" as null
        if (nullableColumnDomains == null) {
            return none();
        }
        return withColumnDomains(toMap(nullableColumnDomains));
    }

    @JsonProperty
    // Available for Jackson serialization only!
    public List<ColumnDomain> getNullableColumnDomains()
    {
        return domains == null ? null : toList(domains);
    }

    /**
     * Converts the serialized list form back into a map, rejecting duplicate columns.
     *
     * @throws IllegalArgumentException if the same column handle appears twice
     */
    private static Map<ColumnHandle, Domain> toMap(List<ColumnDomain> columnDomains)
    {
        Map<ColumnHandle, Domain> map = new HashMap<>();
        for (ColumnDomain columnDomain : columnDomains) {
            if (map.containsKey(columnDomain.getColumnHandle())) {
                throw new IllegalArgumentException("Duplicate column handle!");
            }
            map.put(columnDomain.getColumnHandle(), columnDomain.getDomain());
        }
        return map;
    }

    /**
     * Converts the internal map into the serialized list form.
     */
    private static List<ColumnDomain> toList(Map<ColumnHandle, Domain> columnDomains)
    {
        List<ColumnDomain> list = new ArrayList<>();
        for (Map.Entry<ColumnHandle, Domain> entry : columnDomains.entrySet()) {
            list.add(new ColumnDomain(entry.getKey(), entry.getValue()));
        }
        return list;
    }

    /**
     * Returns true if any value Domain is Domain.none(), which collapses the
     * whole TupleDomain to "none" (see {@link #domains} normalization rules).
     */
    private static boolean containsNoneDomain(Map<ColumnHandle, Domain> domains)
    {
        for (Domain domain : domains.values()) {
            if (domain.isNone()) {
                return true;
            }
        }
        return false;
    }

    /**
     * Copies the map, dropping Domain.all() entries per the normalization
     * rules documented on {@link #domains}.
     */
    private static Map<ColumnHandle, Domain> normalizeAndCopy(Map<ColumnHandle, Domain> domains)
    {
        Map<ColumnHandle, Domain> map = new HashMap<>();
        for (Map.Entry<ColumnHandle, Domain> entry : domains.entrySet()) {
            if (!entry.getValue().isAll()) {
                map.put(entry.getKey(), entry.getValue());
            }
        }
        return map;
    }

    /**
     * Returns true if any tuples would satisfy this TupleDomain
     */
    @JsonIgnore
    public boolean isAll()
    {
        return domains != null && domains.isEmpty();
    }

    /**
     * Returns true if no tuple could ever satisfy this TupleDomain
     */
    @JsonIgnore
    public boolean isNone()
    {
        return domains == null;
    }

    /**
     * Gets the TupleDomain as a map of each column to its respective Domain.
     * - You must check to make sure that this TupleDomain is not None before calling this method
     * - Unmentioned columns have an implicit value of Domain.all()
     * - The column Domains can be thought of as AND'ed to together to form the whole predicate
     *
     * @throws IllegalStateException if this TupleDomain is "none"
     */
    @JsonIgnore
    public Map<ColumnHandle, Domain> getDomains()
    {
        if (domains == null) {
            throw new IllegalStateException("Can not get column Domains from a none TupleDomain");
        }
        return domains;
    }

    /**
     * Extract all column constraints that require exactly one value in their respective Domains.
     */
    public Map<ColumnHandle, Comparable<?>> extractFixedValues()
    {
        if (isNone()) {
            return Collections.emptyMap();
        }

        Map<ColumnHandle, Comparable<?>> fixedValues = new HashMap<>();
        for (Map.Entry<ColumnHandle, Domain> entry : getDomains().entrySet()) {
            if (entry.getValue().isSingleValue()) {
                fixedValues.put(entry.getKey(), entry.getValue().getSingleValue());
            }
        }
        return fixedValues;
    }

    /**
     * Returns the strict intersection of the TupleDomains.
     * The resulting TupleDomain represents the set of tuples that would be valid
     * in both TupleDomains.
     */
    public TupleDomain intersect(TupleDomain other)
    {
        if (this.isNone() || other.isNone()) {
            return none();
        }

        Map<ColumnHandle, Domain> intersected = new HashMap<>(this.getDomains());
        for (Map.Entry<ColumnHandle, Domain> entry : other.getDomains().entrySet()) {
            Domain intersectionDomain = intersected.get(entry.getKey());
            if (intersectionDomain == null) {
                // column only constrained by "other"; carry its Domain over as-is
                intersected.put(entry.getKey(), entry.getValue());
            }
            else {
                intersected.put(entry.getKey(), intersectionDomain.intersect(entry.getValue()));
            }
        }
        return withColumnDomains(intersected);
    }

    /**
     * Returns a TupleDomain in which corresponding column Domains are unioned together.
     *
     * Note that this is NOT equivalent to a strict union as the final result may allow tuples
     * that do not exist in either TupleDomain.
     * For example:
     * TupleDomain X: a => 1, b => 2
     * TupleDomain Y: a => 2, b => 3
     * Column-wise unioned TupleDomain: a = > 1 OR 2, b => 2 OR 3
     * In the above resulting TupleDomain, tuple (a => 1, b => 3) would be considered valid but would
     * not be valid for either TupleDomain X or TupleDomain Y.
     * However, this result is guaranteed to be a superset of the strict union.
     */
    public TupleDomain columnWiseUnion(TupleDomain other)
    {
        if (this.isNone()) {
            return other;
        }
        else if (other.isNone()) {
            return this;
        }

        // Only columns contained in both TupleDomains will make it into the column-wise union.
        // This is b/c an unmentioned column is implicitly an "all" Domain and so any union with that "all" Domain will also be the "all" Domain.
        Map<ColumnHandle, Domain> columnWiseUnioned = new HashMap<>();
        for (Map.Entry<ColumnHandle, Domain> entry : this.getDomains().entrySet()) {
            Domain otherDomain = other.getDomains().get(entry.getKey());
            if (otherDomain != null) {
                columnWiseUnioned.put(entry.getKey(), entry.getValue().union(otherDomain));
            }
        }
        return withColumnDomains(columnWiseUnioned);
    }

    /**
     * Returns true only if there exists a strict intersection between the TupleDomains.
     * i.e. there exists some potential tuple that would be allowable in both TupleDomains.
     */
    public boolean overlaps(TupleDomain other)
    {
        return !this.intersect(other).isNone();
    }

    /**
     * Returns true only if the this TupleDomain contains all possible tuples that would be allowable by
     * the other TupleDomain.
     */
    public boolean contains(TupleDomain other)
    {
        return other.isNone() || this.columnWiseUnion(other).equals(this);
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o) {
            return true;
        }
        if (!(o instanceof TupleDomain)) {
            return false;
        }

        TupleDomain that = (TupleDomain) o;
        // Objects.equals transparently handles the null map representing "none"
        return Objects.equals(domains, that.domains);
    }

    @Override
    public int hashCode()
    {
        // Objects.hashCode returns 0 for null, matching equals() above
        return Objects.hashCode(domains);
    }

    @Override
    public String toString()
    {
        StringBuilder builder = new StringBuilder()
                .append("TupleDomain:");
        if (isAll()) {
            builder.append("ALL");
        }
        else if (isNone()) {
            builder.append("NONE");
        }
        else {
            builder.append(domains);
        }
        return builder.toString();
    }

    // Available for Jackson serialization only!
    public static class ColumnDomain
    {
        private final ColumnHandle columnHandle;
        private final Domain domain;

        @JsonCreator
        public ColumnDomain(
                @JsonProperty("columnHandle") ColumnHandle columnHandle,
                @JsonProperty("domain") Domain domain)
        {
            this.columnHandle = Objects.requireNonNull(columnHandle, "columnHandle is null");
            this.domain = Objects.requireNonNull(domain, "domain is null");
        }

        @JsonProperty
        public ColumnHandle getColumnHandle()
        {
            return columnHandle;
        }

        @JsonProperty
        public Domain getDomain()
        {
            return domain;
        }
    }
}
| |
package server;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import java.net.Socket;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.border.EmptyBorder;
import common.Message;
import common.StatusMessage;
public class ChatServer implements Runnable {

    /** Connected client handlers, indexed by slot; null slots are free. */
    private ServerThread[] users;
    private final int MAX_USERS = 50;
    /** Count of occupied slots. BUG FIX: previously never updated, so the MAX_USERS cap was a no-op. */
    private int currentUser;
    /** Main loop flag; cleared by the UI to shut the server down. */
    private boolean running;
    private JPanel panel;
    private JScrollPane scrollPane;

    /**
     * Creates the server state and starts the server loop on its own thread.
     *
     * @param port unused here; connected sockets are handed in via {@link #addThread(Socket)}
     */
    public ChatServer(int port) {
        // Array slots default to null; no explicit fill loop needed.
        users = new ServerThread[MAX_USERS];
        currentUser = 0;
        running = true;
        Thread thread = new Thread(this);
        thread.setName("Chat server thread");
        thread.start();
    }

    /** Builds the Swing UI: message log, connected-user list, and control buttons. */
    private void init(){
        JFrame frame = new JFrame();
        WindowListener exitListener = new WindowAdapter() {
            @Override
            public void windowClosing(WindowEvent e) {
                running = false; // let run() fall out of its loop and close cleanly
            }
        };
        frame.setTitle("Chat server");
        frame.setResizable(false);
        frame.addWindowListener(exitListener);
        frame.setBounds(100, 100, 650, 400);
        JPanel contentPane = new JPanel();
        contentPane.setBorder(new EmptyBorder(5, 5, 5, 5));
        frame.setContentPane(contentPane);
        contentPane.setLayout(null);

        scrollPane = new JScrollPane();
        scrollPane.setBounds(475, 10, 101, 339);
        scrollPane.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS);
        contentPane.add(scrollPane);

        panel = new JPanel();
        panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS));
        // Set the viewport once the panel exists. (The old code also called
        // setViewportView(panel) while panel was still null — a useless no-op.)
        scrollPane.setViewportView(panel);

        JPanel panel_2 = new JPanel();
        panel_2.setBounds(10, 11, 455, 295);
        contentPane.add(panel_2);
        panel_2.setLayout(null);

        JTextArea textArea = new JTextArea();
        textArea.setBounds(0, 0, 455, 295);
        textArea.setEditable(false);
        panel_2.add(textArea);

        JPanel panel_3 = new JPanel();
        panel_3.setBounds(10, 317, 455, 33);
        contentPane.add(panel_3);
        panel_3.setLayout(null);

        JButton endServer = new JButton("End Server");
        endServer.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                running = false;
            }
        });
        endServer.setBounds(10, 0, 125, 23);
        panel_3.add(endServer);

        JButton resetConnection = new JButton("Reset Connection");
        resetConnection.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                resetConnection();
            }
        });
        resetConnection.setBounds(303, 0, 125, 23);
        panel_3.add(resetConnection);

        frame.setVisible(true);
    }

    /** Disconnects every client, frees all slots, and refreshes the user list. */
    private void resetConnection() {
        for (int y = 0; y < users.length; y++) {
            if (users[y] != null){
                users[y].kill();
                users[y] = null;
            }
        }
        currentUser = 0; // keep the occupancy count in sync with the cleared slots
        resetButtons();
    }

    /** Rebuilds the side panel with one kick-button per connected user. */
    private void resetButtons(){
        JButton temp;
        panel.removeAll();
        for(int x = 0; x < users.length; x++){
            if(users[x] != null){
                temp = new JButton(users[x].getID());
                temp.addActionListener(new ActionListener() {
                    @Override
                    public void actionPerformed(ActionEvent e) {
                        // Button label is the user id; clicking kicks that user.
                        removeUser(((JButton) e.getSource()).getText());
                    }
                });
                panel.add(temp);
            }
        }
        scrollPane.validate();
        scrollPane.repaint();
        panel.validate();
        panel.repaint();
    }

    /**
     * Disconnects the user with the given id and notifies everyone else.
     *
     * @param id user id whose first two characters encode the slot index
     */
    private void removeUser(String id){
        int index = Integer.parseInt(id.substring(0, 2));
        ServerThread user = users[index];
        if (user == null) {
            return; // BUG FIX: already removed (e.g. duplicate QUIT) — previously NPE'd here
        }
        user.kill();
        users[index] = null;
        currentUser--;
        for (int y = 0; y < users.length; y++) {
            if (users[y] != null){
                users[y].send(new Message("", users[y].getID(), id, StatusMessage.REMOVEUSER));
            }
        }
        resetButtons();
    }

    /**
     * Registers a newly connected socket: starts its handler thread, tells
     * existing users about the newcomer, and sends the newcomer the roster.
     */
    public void addThread(Socket socket) { // a new user connects
        if (currentUser >= MAX_USERS) {
            return; // server full
        }
        int x = getOpenSpot();
        if (x == -1) {
            return; // BUG FIX: previously users[-1] would throw when no slot was free
        }
        String s = "";
        users[x] = new ServerThread(socket, x);
        users[x].start();
        currentUser++; // BUG FIX: count was never incremented, so the cap above never triggered
        for (int y = 0; y < users.length; y++) {
            if (users[y] != null && x != y){
                s += users[y].getID() + ":"; // build "id1:id2:..." roster for the newcomer
                users[y].send(new Message("", users[y].getID(), users[x].getID(), StatusMessage.ADDUSER));
            }
        }
        users[x].send(new Message("", users[x].getID(), "0", StatusMessage.NEWUSER));
        if(!s.equals("")){
            s = s.substring(0, s.length() - 1); // drop the trailing ':'
            users[x].send(new Message(s, users[x].getID(), "0", StatusMessage.LOADONLINE));
        }
        resetButtons();
    }

    /** Tells every client to quit, kills their threads, and exits the process. */
    private void closeServer(){
        for (int y = 0; y < users.length; y++) {
            if (users[y] != null){
                users[y].send(new Message("", users[y].getID(), "0", StatusMessage.QUIT));
                users[y].kill();
            }
        }
        System.exit(0);
    }

    /** @return index of the first free slot, or -1 if the server is full */
    private int getOpenSpot(){
        for(int x = 0; x < users.length; x++){
            if(users[x] == null){
                return x;
            }
        }
        return -1;
    }

    /**
     * Main server loop: polls each client for pending messages and routes them
     * to their recipient, handling QUIT messages addressed to the server ("0").
     */
    @Override
    public void run() {
        Message temp;
        init();
        while (running) {
            for (int x = 0; x < users.length; x++) {
                if (users[x] != null && users[x].newInput.get() > 0) {
                    temp = users[x].getMessage();
                    if(!temp.getTo().equals("0")){ // 0 is the server id
                        ServerThread target = users[temp.getToIndex()];
                        if (target != null) { // BUG FIX: recipient may have disconnected; an NPE here killed the server loop
                            target.send(temp);
                        }
                    }else{
                        if(temp.getStatus() == StatusMessage.QUIT){
                            removeUser(temp.getFrom());
                        }
                    }
                }
            }
            try {
                Thread.sleep(5); // small pause to avoid busy-spinning
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore interrupt status
                e.printStackTrace();
            }
        }
        closeServer();
    }
}
| |
/*
* Copyright 2008 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.javascript.jscomp.NodeTraversal.ScopedCallback;
import com.google.javascript.jscomp.Scope.Var;
import com.google.javascript.rhino.InputId;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import com.google.javascript.rhino.jstype.JSType;
import com.google.javascript.rhino.jstype.StaticReference;
import com.google.javascript.rhino.jstype.StaticSourceFile;
import com.google.javascript.rhino.jstype.StaticSymbolTable;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * A helper class for passes that want to access all information about where a
 * variable is referenced and declared at once and then make a decision as to
 * how it should be handled, possibly inlining, reordering, or generating
 * warnings. Callers do this by providing {@link Behavior} and then
 * calling {@link #process(Node, Node)}.
 *
 * @author kushal@google.com (Kushal Dave)
 */
class ReferenceCollectingCallback implements ScopedCallback,
    HotSwapCompilerPass,
    StaticSymbolTable<Var, ReferenceCollectingCallback.Reference> {

  /**
   * Maps a given variable to a collection of references to that name. Note that
   * Var objects are not stable across multiple traversals (unlike scope root or
   * name).
   */
  private final Map<Var, ReferenceCollection> referenceMap =
      Maps.newHashMap();

  /**
   * The stack of basic blocks and scopes the current traversal is in.
   */
  private final Deque<BasicBlock> blockStack = new ArrayDeque<BasicBlock>();

  /**
   * Source of behavior at various points in the traversal.
   */
  private final Behavior behavior;

  /**
   * Javascript compiler to use in traversing.
   */
  private final AbstractCompiler compiler;

  /**
   * Only collect references for filtered variables.
   */
  private final Predicate<Var> varFilter;

  /**
   * Constructor initializes block stack.
   */
  ReferenceCollectingCallback(AbstractCompiler compiler, Behavior behavior) {
    this(compiler, behavior, Predicates.<Var>alwaysTrue());
  }

  /**
   * Constructor only collects references that match the given variable.
   *
   * The test for Var equality uses reference equality, so it's necessary to
   * inject a scope when you traverse.
   */
  ReferenceCollectingCallback(AbstractCompiler compiler, Behavior behavior,
      Predicate<Var> varFilter) {
    this.compiler = compiler;
    this.behavior = behavior;
    this.varFilter = varFilter;
  }

  /**
   * Convenience method for running this pass over a tree with this
   * class as a callback.
   */
  @Override
  public void process(Node externs, Node root) {
    NodeTraversal.traverseRoots(
        compiler, Lists.newArrayList(externs, root), this);
  }

  /**
   * Same as process but only runs on a part of AST associated to one script.
   */
  @Override
  public void hotSwapScript(Node scriptRoot, Node originalRoot) {
    NodeTraversal.traverse(compiler, scriptRoot, this);
  }

  /**
   * Gets the variables that were referenced in this callback.
   */
  @Override
  public Iterable<Var> getAllSymbols() {
    return referenceMap.keySet();
  }

  @Override
  public Scope getScope(Var var) {
    return var.scope;
  }

  /**
   * Gets the reference collection for the given variable.
   */
  @Override
  public ReferenceCollection getReferences(Var v) {
    return referenceMap.get(v);
  }

  /**
   * For each node, update the block stack and reference collection
   * as appropriate.
   */
  @Override
  public void visit(NodeTraversal t, Node n, Node parent) {
    if (n.getType() == Token.NAME) {
      Var v;
      if (n.getString().equals("arguments")) {
        // "arguments" is implicit and has no declaration; use its synthetic Var.
        v = t.getScope().getArgumentsVar();
      } else {
        v = t.getScope().getVar(n.getString());
      }
      if (v != null && varFilter.apply(v)) {
        addReference(t, v, new Reference(n, t, blockStack.peek()));
      }
    }

    // Mirror of the push in shouldTraverse: leaving a block boundary pops it.
    if (isBlockBoundary(n, parent)) {
      blockStack.pop();
    }
  }

  /**
   * Updates block stack and invokes any additional behavior.
   */
  @Override
  public void enterScope(NodeTraversal t) {
    Node n = t.getScope().getRootNode();
    BasicBlock parent = blockStack.isEmpty() ? null : blockStack.peek();
    blockStack.push(new BasicBlock(parent, n));
  }

  /**
   * Updates block stack and invokes any additional behavior.
   */
  @Override
  public void exitScope(NodeTraversal t) {
    blockStack.pop();
    if (t.getScope().isGlobal()) {
      // Update global scope reference lists when we are done with it.
      compiler.updateGlobalVarReferences(referenceMap, t.getScopeRoot());
      behavior.afterExitScope(t, compiler.getGlobalVarReferences());
    } else {
      behavior.afterExitScope(t, new ReferenceMapWrapper(referenceMap));
    }
  }

  /**
   * Updates block stack.
   */
  @Override
  public boolean shouldTraverse(NodeTraversal nodeTraversal, Node n,
      Node parent) {
    // If node is a new basic block, put on basic block stack
    if (isBlockBoundary(n, parent)) {
      blockStack.push(new BasicBlock(blockStack.peek(), n));
    }
    return true;
  }

  /**
   * @return true if this node marks the start of a new basic block
   */
  private static boolean isBlockBoundary(Node n, Node parent) {
    if (parent != null) {
      switch (parent.getType()) {
        case Token.DO:
        case Token.FOR:
        case Token.TRY:
        case Token.WHILE:
        case Token.WITH:
          // NOTE: TRY has up to 3 child blocks:
          // TRY
          //   BLOCK
          //   BLOCK
          //     CATCH
          //   BLOCK
          // Note that there is an explicit CATCH token but no explicit
          // FINALLY token. For simplicity, we consider each BLOCK
          // a separate basic BLOCK.
          return true;
        case Token.AND:
        case Token.HOOK:
        case Token.IF:
        case Token.OR:
          // The first child of a conditional is not a boundary,
          // but all the rest of the children are.
          return n != parent.getFirstChild();
      }
    }

    return n.getType() == Token.CASE;
  }

  /** Records a reference to {@code v}, lazily creating its collection. */
  private void addReference(NodeTraversal t, Var v, Reference reference) {
    // Create collection if none already
    ReferenceCollection referenceInfo = referenceMap.get(v);
    if (referenceInfo == null) {
      referenceInfo = new ReferenceCollection();
      referenceMap.put(v, referenceInfo);
    }

    // Add this particular reference
    referenceInfo.add(reference, t, v);
  }

  /** Read-only view of collected references, handed to {@link Behavior}. */
  interface ReferenceMap {
    ReferenceCollection getReferences(Var var);
  }

  /** Adapts the internal reference map to the {@link ReferenceMap} interface. */
  private static class ReferenceMapWrapper implements ReferenceMap {
    private final Map<Var, ReferenceCollection> referenceMap;

    public ReferenceMapWrapper(Map<Var, ReferenceCollection> referenceMap) {
      this.referenceMap = referenceMap;
    }

    @Override
    public ReferenceCollection getReferences(Var var) {
      return referenceMap.get(var);
    }
  }

  /**
   * Way for callers to add specific behavior during traversal that
   * utilizes the built-up reference information.
   */
  interface Behavior {
    /**
     * Called after we finish with a scope.
     */
    void afterExitScope(NodeTraversal t, ReferenceMap referenceMap);
  }

  // Stateless shared instance; made final so it cannot be reassigned.
  static final Behavior DO_NOTHING_BEHAVIOR = new Behavior() {
    @Override
    public void afterExitScope(NodeTraversal t, ReferenceMap referenceMap) {}
  };

  /**
   * A collection of references. Can be subclassed to apply checks or
   * store additional state when adding.
   */
  static class ReferenceCollection implements Iterable<Reference> {

    List<Reference> references = Lists.newArrayList();

    @Override
    public Iterator<Reference> iterator() {
      return references.iterator();
    }

    /** Appends a reference; subclasses may override to add checks. */
    void add(Reference reference, NodeTraversal t, Var v) {
      references.add(reference);
    }

    /**
     * Determines if the variable for this reference collection is
     * "well-defined." A variable is well-defined if we can prove at
     * compile-time that it's assigned a value before it's used.
     *
     * Notice that if this function returns false, this doesn't imply that the
     * variable is used before it's assigned. It just means that we don't
     * have enough information to make a definitive judgement.
     */
    protected boolean isWellDefined() {
      int size = references.size();
      if (size == 0) {
        return false;
      }

      // If this is a declaration that does not instantiate the variable,
      // it's not well-defined.
      Reference init = getInitializingReference();
      if (init == null) {
        return false;
      }

      Preconditions.checkState(references.get(0).isDeclaration());
      BasicBlock initBlock = init.getBasicBlock();
      for (int i = 1; i < size; i++) {
        if (!initBlock.provablyExecutesBefore(
                references.get(i).getBasicBlock())) {
          return false;
        }
      }

      return true;
    }

    /**
     * Whether the variable is escaped into an inner scope.
     */
    boolean isEscaped() {
      Scope scope = null;
      for (Reference ref : references) {
        if (scope == null) {
          scope = ref.scope;
        } else if (scope != ref.scope) {
          return true;
        }
      }
      return false;
    }

    /**
     * @param index The index into the references array to look for an
     * assigning declaration.
     *
     * This is either the declaration if a value is assigned (such as
     * "var a = 2", "function a()...", "... catch (a)...").
     */
    private boolean isInitializingDeclarationAt(int index) {
      Reference maybeInit = references.get(index);
      if (maybeInit.isInitializingDeclaration()) {
        // This is a declaration that represents the initial value.
        // Specifically, var declarations without assignments such as "var a;"
        // are not.
        return true;
      }
      return false;
    }

    /**
     * @param index The index into the references array to look for an
     * initialized assignment reference. That is, an assignment immediately
     * follow a variable declaration that itself does not initialize the
     * variable.
     */
    private boolean isInitializingAssignmentAt(int index) {
      if (index < references.size() && index > 0) {
        Reference maybeDecl = references.get(index - 1);
        if (maybeDecl.isVarDeclaration()) {
          Preconditions.checkState(!maybeDecl.isInitializingDeclaration());
          Reference maybeInit = references.get(index);
          if (maybeInit.isSimpleAssignmentToName()) {
            return true;
          }
        }
      }
      return false;
    }

    /**
     * @return The reference that provides the value for the variable at the
     * time of the first read, if known, otherwise null.
     *
     * This is either the variable declaration ("var a = ...") or first
     * reference following the declaration if it is an assignment.
     */
    Reference getInitializingReference() {
      if (isInitializingDeclarationAt(0)) {
        return references.get(0);
      } else if (isInitializingAssignmentAt(1)) {
        return references.get(1);
      }
      return null;
    }

    /**
     * Constants are allowed to be defined after their first use.
     */
    Reference getInitializingReferenceForConstants() {
      int size = references.size();
      for (int i = 0; i < size; i++) {
        if (isInitializingDeclarationAt(i) || isInitializingAssignmentAt(i)) {
          return references.get(i);
        }
      }
      return null;
    }

    /**
     * @return Whether the variable is only assigned a value once for its
     * lifetime.
     */
    boolean isAssignedOnceInLifetime() {
      Reference ref = getOneAndOnlyAssignment();
      if (ref == null) {
        return false;
      }

      // Make sure this assignment is not in a loop.
      for (BasicBlock block = ref.getBasicBlock();
           block != null; block = block.getParent()) {
        if (block.isFunction) {
          break;
        } else if (block.isLoop) {
          return false;
        }
      }

      return true;
    }

    /**
     * @return The one and only assignment. Returns null if there are 0 or 2+
     * assignments.
     */
    private Reference getOneAndOnlyAssignment() {
      Reference assignment = null;
      int size = references.size();
      for (int i = 0; i < size; i++) {
        Reference ref = references.get(i);
        if (ref.isLvalue() || ref.isInitializingDeclaration()) {
          if (assignment == null) {
            assignment = ref;
          } else {
            return null;
          }
        }
      }
      return assignment;
    }

    /**
     * @return Whether the variable is never assigned a value.
     */
    boolean isNeverAssigned() {
      int size = references.size();
      for (int i = 0; i < size; i++) {
        Reference ref = references.get(i);
        if (ref.isLvalue() || ref.isInitializingDeclaration()) {
          return false;
        }
      }
      return true;
    }

    /** @return Whether the first recorded reference both declares and assigns. */
    boolean firstReferenceIsAssigningDeclaration() {
      int size = references.size();
      if (size > 0 && references.get(0).isInitializingDeclaration()) {
        return true;
      }
      return false;
    }
  }

  /**
   * Represents a single declaration or reference to a variable.
   */
  static final class Reference implements StaticReference<JSType> {

    private static final Set<Integer> DECLARATION_PARENTS =
        ImmutableSet.of(Token.VAR, Token.FUNCTION, Token.CATCH);

    private final Node nameNode;
    private final BasicBlock basicBlock;
    private final Scope scope;
    private final InputId inputId;
    private final StaticSourceFile sourceFile;

    Reference(Node nameNode, NodeTraversal t,
        BasicBlock basicBlock) {
      this(nameNode, basicBlock, t.getScope(), t.getInput().getInputId());
    }

    // Bleeding functions are weird, because the declaration does
    // not appear inside their scope. So they need their own constructor.
    static Reference newBleedingFunction(NodeTraversal t,
        BasicBlock basicBlock, Node func) {
      return new Reference(func.getFirstChild(),
          basicBlock, t.getScope(), t.getInput().getInputId());
    }

    /**
     * Creates a variable reference in a given script file name, used in tests.
     *
     * @return The created reference.
     */
    @VisibleForTesting
    static Reference createRefForTest(CompilerInput input) {
      return new Reference(new Node(Token.NAME), null, null,
          input.getInputId());
    }

    private Reference(Node nameNode,
        BasicBlock basicBlock, Scope scope, InputId inputId) {
      this.nameNode = nameNode;
      this.basicBlock = basicBlock;
      this.scope = scope;
      this.inputId = inputId;
      this.sourceFile = nameNode.getStaticSourceFile();
    }

    @Override
    public Var getSymbol() {
      return scope.getVar(nameNode.getString());
    }

    @Override
    public Node getNode() {
      return nameNode;
    }

    public InputId getInputId() {
      return inputId;
    }

    @Override
    public StaticSourceFile getSourceFile() {
      return sourceFile;
    }

    boolean isDeclaration() {
      Node parent = getParent();
      Node grandparent = parent.getParent();
      // && binds tighter than ||: either the parent is VAR/FUNCTION/CATCH,
      // or this name is a function parameter (child of an LP under FUNCTION).
      return DECLARATION_PARENTS.contains(parent.getType()) ||
          parent.getType() == Token.LP &&
          grandparent.getType() == Token.FUNCTION;
    }

    boolean isVarDeclaration() {
      return getParent().getType() == Token.VAR;
    }

    boolean isHoistedFunction() {
      return NodeUtil.isHoistedFunctionDeclaration(getParent());
    }

    /**
     * Determines whether the variable is initialized at the declaration.
     */
    boolean isInitializingDeclaration() {
      // VAR is the only type of variable declaration that may not initialize
      // its variable. Catch blocks, named functions, and parameters all do.
      // NOTE(review): && binds tighter than ||, so this reads as
      // (isDeclaration() && parent != VAR) || (nameNode has an initializer);
      // appears intentional since an initializer on a NAME implies a VAR
      // declaration — confirm before restructuring.
      return isDeclaration() &&
          getParent().getType() != Token.VAR ||
          nameNode.getFirstChild() != null;
    }

    /**
     * @return For an assignment, variable declaration, or function declaration
     * return the assigned value, otherwise null.
     */
    Node getAssignedValue() {
      Node parent = getParent();
      return (parent.getType() == Token.FUNCTION)
          ? parent : NodeUtil.getAssignedValue(nameNode);
    }

    BasicBlock getBasicBlock() {
      return basicBlock;
    }

    Node getParent() {
      return getNode().getParent();
    }

    Node getGrandparent() {
      Node parent = getParent();
      return parent == null ? null : parent.getParent();
    }

    private static boolean isLhsOfForInExpression(Node n) {
      Node parent = n.getParent();
      if (parent.getType() == Token.VAR) {
        return isLhsOfForInExpression(parent);
      }
      return NodeUtil.isForIn(parent) && parent.getFirstChild() == n;
    }

    boolean isSimpleAssignmentToName() {
      Node parent = getParent();
      return parent.getType() == Token.ASSIGN
          && parent.getFirstChild() == nameNode;
    }

    boolean isLvalue() {
      Node parent = getParent();
      int parentType = parent.getType();
      return (parentType == Token.VAR && nameNode.getFirstChild() != null)
          || parentType == Token.INC
          || parentType == Token.DEC
          || (NodeUtil.isAssignmentOp(parent)
              && parent.getFirstChild() == nameNode)
          || isLhsOfForInExpression(nameNode);
    }

    Scope getScope() {
      return scope;
    }
  }

  /**
   * Represents a section of code that is uninterrupted by control structures
   * (conditional or iterative logic).
   */
  static final class BasicBlock {

    private final BasicBlock parent;

    /**
     * Determines whether the block may not be part of the normal control flow,
     * but instead "hoisted" to the top of the scope.
     */
    private final boolean isHoisted;

    /**
     * Whether this block denotes a function scope.
     */
    private final boolean isFunction;

    /**
     * Whether this block denotes a loop.
     */
    private final boolean isLoop;

    /**
     * Creates a new block.
     * @param parent The containing block.
     * @param root The root node of the block.
     */
    BasicBlock(BasicBlock parent, Node root) {
      this.parent = parent;

      // only named functions may be hoisted.
      this.isHoisted = NodeUtil.isHoistedFunctionDeclaration(root);

      this.isFunction = root.getType() == Token.FUNCTION;

      if (root.getParent() != null) {
        int pType = root.getParent().getType();
        this.isLoop = pType == Token.DO ||
            pType == Token.WHILE ||
            pType == Token.FOR;
      } else {
        this.isLoop = false;
      }
    }

    BasicBlock getParent() {
      return parent;
    }

    /**
     * Determines whether this block is equivalent to the very first block that
     * is created when reference collection traversal enters global scope. Note
     * that when traversing a single script in a hot-swap fashion a new instance
     * of {@code BasicBlock} is created.
     *
     * @return true if this is global scope block.
     */
    boolean isGlobalScopeBlock() {
      return getParent() == null;
    }

    /**
     * Determines whether this block is guaranteed to begin executing before
     * the given block does.
     */
    boolean provablyExecutesBefore(BasicBlock thatBlock) {
      // If thatBlock is a descendant of this block, and there are no hoisted
      // blocks between them, then this block must start before thatBlock.
      BasicBlock currentBlock;
      for (currentBlock = thatBlock;
           currentBlock != null && currentBlock != this;
           currentBlock = currentBlock.getParent()) {
        if (currentBlock.isHoisted) {
          return false;
        }
      }

      if (currentBlock == this) {
        return true;
      }
      if (isGlobalScopeBlock() && thatBlock.isGlobalScopeBlock()) {
        return true;
      }
      return false;
    }
  }
}
| |
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.hc.client5.http.utils;
import org.apache.hc.core5.http.HttpHost;
import org.apache.hc.core5.net.URIAuthority;
import org.apache.hc.core5.net.URIBuilder;
import org.apache.hc.core5.util.Args;
import org.apache.hc.core5.util.TextUtils;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.*;
/**
 * A collection of utilities for {@link URI URIs}, to workaround
 * bugs within the class or for ease-of-use features.
 *
 * @since 4.0
 */
public class URIUtils {

    /**
     * A convenience method for creating a new {@link URI} whose scheme, host
     * and port are taken from the target host, but whose path, query and
     * fragment are taken from the existing URI. The fragment is only used if
     * dropFragment is false. The path is set to "/" if not explicitly specified.
     *
     * @param uri
     *            Contains the path, query and fragment to use.
     * @param target
     *            Contains the scheme, host and port to use.
     * @param dropFragment
     *            True if the fragment should not be copied.
     *
     * @throws URISyntaxException
     *             If the resulting URI is invalid.
     */
    public static URI rewriteURI(
            final URI uri,
            final HttpHost target,
            final boolean dropFragment) throws URISyntaxException {
        Args.notNull(uri, "URI");
        if (uri.isOpaque()) {
            // Opaque URIs (e.g. "mailto:") have no hierarchical components to rewrite.
            return uri;
        }
        final URIBuilder uribuilder = new URIBuilder(uri);
        if (target != null) {
            uribuilder.setScheme(target.getSchemeName());
            uribuilder.setHost(target.getHostName());
            uribuilder.setPort(target.getPort());
        } else {
            // No target given: strip the authority components, yielding a relative URI.
            uribuilder.setScheme(null);
            uribuilder.setHost((String) null);
            uribuilder.setPort(-1);
        }
        if (dropFragment) {
            uribuilder.setFragment(null);
        }
        // Remove empty path segments ("a//b" -> "a/b"); a trailing empty segment
        // is kept so that a trailing slash survives the rewrite.
        final List<String> originalPathSegments = uribuilder.getPathSegments();
        final List<String> pathSegments = new ArrayList<>(originalPathSegments);
        for (final Iterator<String> it = pathSegments.iterator(); it.hasNext(); ) {
            final String pathSegment = it.next();
            if (pathSegment.isEmpty() && it.hasNext()) {
                it.remove();
            }
        }
        if (pathSegments.size() != originalPathSegments.size()) {
            uribuilder.setPathSegments(pathSegments);
        }
        // No segments at all: normalize the path to "/".
        if (pathSegments.isEmpty()) {
            uribuilder.setPathSegments("");
        }
        return uribuilder.build();
    }

    /**
     * A convenience method for
     * {@link URIUtils#rewriteURI(URI, HttpHost, boolean)} that always keeps the
     * fragment.
     */
    public static URI rewriteURI(
            final URI uri,
            final HttpHost target) throws URISyntaxException {
        return rewriteURI(uri, target, false);
    }

    /**
     * A convenience method that creates a new {@link URI} whose scheme, host, port, path,
     * query are taken from the existing URI, dropping any fragment or user-information.
     * The path is set to "/" if not explicitly specified. The existing URI is returned
     * unmodified if it has no fragment or user-information and has a path.
     *
     * @param uri
     *            original URI.
     * @throws URISyntaxException
     *             If the resulting URI is invalid.
     */
    public static URI rewriteURI(final URI uri) throws URISyntaxException {
        Args.notNull(uri, "URI");
        if (uri.isOpaque()) {
            return uri;
        }
        final URIBuilder uribuilder = new URIBuilder(uri);
        if (uribuilder.getUserInfo() != null) {
            uribuilder.setUserInfo(null);
        }
        if (TextUtils.isEmpty(uribuilder.getPath())) {
            uribuilder.setPath("/");
        }
        if (uribuilder.getHost() != null) {
            // Host names are case-insensitive; ROOT avoids locale-dependent casing.
            uribuilder.setHost(uribuilder.getHost().toLowerCase(Locale.ROOT));
        }
        uribuilder.setFragment(null);
        return uribuilder.build();
    }

    /**
     * Resolves a URI reference against a base URI. Work-around for bug in
     * java.net.URI (http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4708535)
     *
     * @param baseURI the base URI
     * @param reference the URI reference
     * @return the resulting URI
     */
    public static URI resolve(final URI baseURI, final String reference) {
        return resolve(baseURI, URI.create(reference));
    }

    /**
     * Resolves a URI reference against a base URI. Work-around for bugs in
     * java.net.URI (e.g. http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4708535)
     *
     * @param baseURI the base URI
     * @param reference the URI reference
     * @return the resulting URI
     */
    public static URI resolve(final URI baseURI, final URI reference) {
        Args.notNull(baseURI, "Base URI");
        Args.notNull(reference, "Reference URI");
        final String s = reference.toASCIIString();
        if (s.startsWith("?")) {
            // Query-only reference: java.net.URI does not resolve these per
            // RFC 3986, so splice the query onto the base URI by hand.
            String baseUri = baseURI.toASCIIString();
            final int i = baseUri.indexOf('?');
            baseUri = i > -1 ? baseUri.substring(0, i) : baseUri;
            return URI.create(baseUri + s);
        }
        final boolean emptyReference = s.isEmpty();
        URI resolved;
        if (emptyReference) {
            // Empty reference resolves to the base; go through "#" and strip
            // the fragment to work around the JDK bug referenced above.
            resolved = baseURI.resolve(URI.create("#"));
            final String resolvedString = resolved.toASCIIString();
            resolved = URI.create(resolvedString.substring(0, resolvedString.indexOf('#')));
        } else {
            resolved = baseURI.resolve(reference);
        }
        try {
            return normalizeSyntax(resolved);
        } catch (final URISyntaxException ex) {
            throw new IllegalArgumentException(ex);
        }
    }

    /**
     * Removes dot segments according to RFC 3986, section 5.2.4 and
     * Syntax-Based Normalization according to RFC 3986, section 6.2.2.
     *
     * @param uri the original URI
     * @return the URI without dot segments
     */
    static URI normalizeSyntax(final URI uri) throws URISyntaxException {
        if (uri.isOpaque() || uri.getAuthority() == null) {
            // opaque and file: URIs
            return uri;
        }
        Args.check(uri.isAbsolute(), "Base URI must be absolute");
        final URIBuilder builder = new URIBuilder(uri);
        final String path = builder.getPath();
        if (path != null && !path.equals("/")) {
            final String[] inputSegments = path.split("/");
            // Surviving segments in original order. ArrayDeque replaces the
            // legacy synchronized Stack: addLast/removeLast give the same
            // LIFO behavior, and for-each iteration is insertion order, so
            // the rebuilt path is identical.
            final Deque<String> outputSegments = new ArrayDeque<>();
            for (final String inputSegment : inputSegments) {
                if (inputSegment.isEmpty() || ".".equals(inputSegment)) {
                    // Drop empty segments and same-directory references.
                } else if ("..".equals(inputSegment)) {
                    // Parent reference: discard the preceding segment, if any.
                    if (!outputSegments.isEmpty()) {
                        outputSegments.removeLast();
                    }
                } else {
                    outputSegments.addLast(inputSegment);
                }
            }
            final StringBuilder outputBuffer = new StringBuilder();
            for (final String outputSegment : outputSegments) {
                outputBuffer.append('/').append(outputSegment);
            }
            if (path.lastIndexOf('/') == path.length() - 1) {
                // path.endsWith("/") || path.equals("")
                outputBuffer.append('/');
            }
            builder.setPath(outputBuffer.toString());
        }
        // Scheme and host are case-insensitive; normalize to lower case.
        if (builder.getScheme() != null) {
            builder.setScheme(builder.getScheme().toLowerCase(Locale.ROOT));
        }
        if (builder.getHost() != null) {
            builder.setHost(builder.getHost().toLowerCase(Locale.ROOT));
        }
        return builder.build();
    }

    /**
     * Extracts target host from the given {@link URI}.
     *
     * @param uri the URI to extract the host from; may be {@code null}.
     * @return the target host if the URI is absolute or {@code null} if the URI is
     *         relative or does not contain a valid host name.
     *
     * @since 4.1
     */
    public static HttpHost extractHost(final URI uri) {
        if (uri == null) {
            return null;
        }
        if (uri.isAbsolute()) {
            if (uri.getHost() == null) { // normal parse failed; let's do it ourselves
                // authority does not seem to care about the valid character-set for host names
                if (uri.getAuthority() != null) {
                    String content = uri.getAuthority();
                    // Strip off any leading user credentials
                    int at = content.indexOf('@');
                    if (at != -1) {
                        content = content.substring(at + 1);
                    }
                    final String scheme = uri.getScheme();
                    final String hostname;
                    final int port;
                    at = content.indexOf(':');
                    if (at != -1) {
                        hostname = content.substring(0, at);
                        try {
                            final String portText = content.substring(at + 1);
                            port = !TextUtils.isEmpty(portText) ? Integer.parseInt(portText) : -1;
                        } catch (final NumberFormatException ex) {
                            // Malformed port: treat the authority as unusable.
                            return null;
                        }
                    } else {
                        hostname = content;
                        port = -1;
                    }
                    try {
                        return new HttpHost(scheme, hostname, port);
                    } catch (final IllegalArgumentException ex) {
                        return null;
                    }
                }
            } else {
                return new HttpHost(uri.getScheme(), uri.getHost(), uri.getPort());
            }
        }
        return null;
    }

    /**
     * Derives the interpreted (absolute) URI that was used to generate the last
     * request. This is done by extracting the request-uri and target origin for
     * the last request and scanning all the redirect locations for the last
     * fragment identifier, then combining the result into a {@link URI}.
     *
     * @param originalURI
     *            original request before any redirects
     * @param target
     *            if the last URI is relative, it is resolved against this target,
     *            or {@code null} if not available.
     * @param redirects
     *            collection of redirect locations since the original request
     *            or {@code null} if not available.
     * @return interpreted (absolute) URI
     */
    public static URI resolve(
            final URI originalURI,
            final HttpHost target,
            final List<URI> redirects) throws URISyntaxException {
        Args.notNull(originalURI, "Request URI");
        final URIBuilder uribuilder;
        if (redirects == null || redirects.isEmpty()) {
            uribuilder = new URIBuilder(originalURI);
        } else {
            // Start from the last redirect location.
            uribuilder = new URIBuilder(redirects.get(redirects.size() - 1));
            String frag = uribuilder.getFragment();
            // read interpreted fragment identifier from redirect locations
            for (int i = redirects.size() - 1; frag == null && i >= 0; i--) {
                frag = redirects.get(i).getFragment();
            }
            uribuilder.setFragment(frag);
        }
        // read interpreted fragment identifier from original request
        if (uribuilder.getFragment() == null) {
            uribuilder.setFragment(originalURI.getFragment());
        }
        // last target origin
        if (target != null && !uribuilder.isAbsolute()) {
            uribuilder.setScheme(target.getSchemeName());
            uribuilder.setHost(target.getHostName());
            uribuilder.setPort(target.getPort());
        }
        return uribuilder.build();
    }

    /**
     * Convenience factory method for {@link URI} instances.
     *
     * @param host the target host; may be {@code null}, in which case only the
     *             path is used.
     * @param path the request path.
     * @throws URISyntaxException if the resulting URI is invalid.
     * @since 5.0
     */
    public static URI create(final HttpHost host, final String path) throws URISyntaxException {
        final URIBuilder builder = new URIBuilder(path);
        if (host != null) {
            builder.setHost(host.getHostName()).setPort(host.getPort()).setScheme(host.getSchemeName());
        }
        return builder.build();
    }

    /**
     * Convenience factory method for {@link URI} instances.
     *
     * @param scheme the URI scheme; may be {@code null}.
     * @param host the target authority; may be {@code null}.
     * @param path the request path.
     * @throws URISyntaxException if the resulting URI is invalid.
     * @since 5.0
     */
    public static URI create(final String scheme, final URIAuthority host, final String path) throws URISyntaxException {
        final URIBuilder builder = new URIBuilder(path);
        if (scheme != null) {
            builder.setScheme(scheme);
        }
        if (host != null) {
            builder.setHost(host.getHostName()).setPort(host.getPort());
        }
        return builder.build();
    }

    /**
     * This class should not be instantiated.
     */
    private URIUtils() {
    }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
package fixtures.azureresource.implementation;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.AzureClient;
import com.microsoft.azure.AzureServiceClient;
import com.microsoft.azure.Resource;
import com.microsoft.rest.credentials.ServiceClientCredentials;
import com.microsoft.rest.RestClient;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.Validator;
import fixtures.azureresource.ErrorException;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import okhttp3.ResponseBody;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.PUT;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
* Initializes a new instance of the AutoRestResourceFlatteningTestServiceImpl class.
*/
public class AutoRestResourceFlatteningTestServiceImpl extends AzureServiceClient {
/** The Retrofit service to perform REST calls. */
private AutoRestResourceFlatteningTestServiceService service;
/** The {@link AzureClient} used for long running operations. */
private AzureClient azureClient;
/**
 * Gets the {@link AzureClient} used for long running operations.
 * @return the azure client;
 */
public AzureClient getAzureClient() {
return this.azureClient;
}
/** The preferred language for the response, sent as the accept-language header. */
private String acceptLanguage;
/**
 * Gets the preferred language for the response.
 *
 * @return the acceptLanguage value.
 */
public String acceptLanguage() {
return this.acceptLanguage;
}
/**
 * Sets the preferred language for the response.
 *
 * @param acceptLanguage the acceptLanguage value.
 * @return the service client itself
 */
public AutoRestResourceFlatteningTestServiceImpl withAcceptLanguage(String acceptLanguage) {
this.acceptLanguage = acceptLanguage;
return this;
}
/** The retry timeout in seconds for Long Running Operations. Default value is 30. */
private int longRunningOperationRetryTimeout;
/**
 * Gets the retry timeout in seconds for Long Running Operations. Default value is 30.
 *
 * @return the longRunningOperationRetryTimeout value.
 */
public int longRunningOperationRetryTimeout() {
return this.longRunningOperationRetryTimeout;
}
/**
 * Sets the retry timeout in seconds for Long Running Operations.
 *
 * @param longRunningOperationRetryTimeout the longRunningOperationRetryTimeout value.
 * @return the service client itself
 */
public AutoRestResourceFlatteningTestServiceImpl withLongRunningOperationRetryTimeout(int longRunningOperationRetryTimeout) {
this.longRunningOperationRetryTimeout = longRunningOperationRetryTimeout;
return this;
}
/** When set to true a unique x-ms-client-request-id value is generated and included in each request. Default is true. */
private boolean generateClientRequestId;
/**
 * Gets whether a unique x-ms-client-request-id value is generated and included in each request.
 *
 * @return the generateClientRequestId value.
 */
public boolean generateClientRequestId() {
return this.generateClientRequestId;
}
/**
 * Sets whether a unique x-ms-client-request-id value is generated and included in each request.
 *
 * @param generateClientRequestId the generateClientRequestId value.
 * @return the service client itself
 */
public AutoRestResourceFlatteningTestServiceImpl withGenerateClientRequestId(boolean generateClientRequestId) {
this.generateClientRequestId = generateClientRequestId;
return this;
}
/**
 * Initializes an instance of AutoRestResourceFlatteningTestService client.
 *
 * @param credentials the management credentials for Azure
 */
public AutoRestResourceFlatteningTestServiceImpl(ServiceClientCredentials credentials) {
// Default endpoint for this generated test client.
this("http://localhost", credentials);
}
/**
 * Initializes an instance of AutoRestResourceFlatteningTestService client.
 *
 * @param baseUrl the base URL of the host
 * @param credentials the management credentials for Azure
 */
public AutoRestResourceFlatteningTestServiceImpl(String baseUrl, ServiceClientCredentials credentials) {
super(baseUrl, credentials);
initialize();
}
/**
 * Initializes an instance of AutoRestResourceFlatteningTestService client.
 *
 * @param restClient the REST client to connect to Azure.
 */
public AutoRestResourceFlatteningTestServiceImpl(RestClient restClient) {
super(restClient);
initialize();
}
// Applies the generated defaults and builds the Retrofit service proxy.
protected void initialize() {
this.acceptLanguage = "en-US";
this.longRunningOperationRetryTimeout = 30;
this.generateClientRequestId = true;
this.azureClient = new AzureClient(this);
initializeService();
}
/**
 * Gets the User-Agent header for the client.
 *
 * @return the user agent string.
 */
@Override
public String userAgent() {
    // Equivalent to the String.format version: "Azure-SDK-For-Java/<version> (<client>)".
    String sdkVersion = getClass().getPackage().getImplementationVersion();
    return "Azure-SDK-For-Java/" + sdkVersion
            + " (" + "AutoRestResourceFlatteningTestService, 1.0.0" + ")";
}
// Creates the Retrofit proxy implementing the REST call interface below.
private void initializeService() {
service = restClient().retrofit().create(AutoRestResourceFlatteningTestServiceService.class);
}
/**
 * The interface defining all the services for AutoRestResourceFlatteningTestService to be
 * used by Retrofit to perform actually REST calls.
 */
interface AutoRestResourceFlatteningTestServiceService {
// PUT of an external resource array.
@Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: fixtures.azureresource.AutoRestResourceFlatteningTestService putArray" })
@PUT("azure/resource-flatten/array")
Observable<Response<ResponseBody>> putArray(@Body List<Resource> resourceArray, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
// GET of an external resource array.
@Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: fixtures.azureresource.AutoRestResourceFlatteningTestService getArray" })
@GET("azure/resource-flatten/array")
Observable<Response<ResponseBody>> getArray(@Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
// PUT of an external resource dictionary.
@Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: fixtures.azureresource.AutoRestResourceFlatteningTestService putDictionary" })
@PUT("azure/resource-flatten/dictionary")
Observable<Response<ResponseBody>> putDictionary(@Body Map<String, FlattenedProductInner> resourceDictionary, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
// GET of an external resource dictionary.
@Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: fixtures.azureresource.AutoRestResourceFlatteningTestService getDictionary" })
@GET("azure/resource-flatten/dictionary")
Observable<Response<ResponseBody>> getDictionary(@Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
// PUT of an external resource collection (complex object).
@Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: fixtures.azureresource.AutoRestResourceFlatteningTestService putResourceCollection" })
@PUT("azure/resource-flatten/resourcecollection")
Observable<Response<ResponseBody>> putResourceCollection(@Body ResourceCollectionInner resourceComplexObject, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
// GET of an external resource collection (complex object).
@Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: fixtures.azureresource.AutoRestResourceFlatteningTestService getResourceCollection" })
@GET("azure/resource-flatten/resourcecollection")
Observable<Response<ResponseBody>> getResourceCollection(@Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
}
/**
 * Put External Resource as an Array.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws ErrorException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 */
public void putArray() {
putArrayWithServiceResponseAsync().toBlocking().single().body();
}
/**
 * Put External Resource as an Array.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<Void> putArrayAsync(final ServiceCallback<Void> serviceCallback) {
return ServiceFuture.fromResponse(putArrayWithServiceResponseAsync(), serviceCallback);
}
/**
 * Put External Resource as an Array.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return an {@link Observable} emitting the response body once the call completes.
 */
public Observable<Void> putArrayAsync() {
return putArrayWithServiceResponseAsync().map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
return response.body();
}
});
}
/**
 * Put External Resource as an Array.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return an {@link Observable} emitting the {@link ServiceResponse} once the call completes.
 */
public Observable<ServiceResponse<Void>> putArrayWithServiceResponseAsync() {
// Parameterless overload: no payload is sent.
final List<Resource> resourceArray = null;
return service.putArray(resourceArray, this.acceptLanguage(), this.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = putArrayDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
/**
 * Put External Resource as an Array.
 *
 * @param resourceArray External Resource as an Array to put
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws ErrorException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 */
public void putArray(List<Resource> resourceArray) {
putArrayWithServiceResponseAsync(resourceArray).toBlocking().single().body();
}
/**
 * Put External Resource as an Array.
 *
 * @param resourceArray External Resource as an Array to put
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<Void> putArrayAsync(List<Resource> resourceArray, final ServiceCallback<Void> serviceCallback) {
return ServiceFuture.fromResponse(putArrayWithServiceResponseAsync(resourceArray), serviceCallback);
}
/**
 * Put External Resource as an Array.
 *
 * @param resourceArray External Resource as an Array to put
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return an {@link Observable} emitting the response body once the call completes.
 */
public Observable<Void> putArrayAsync(List<Resource> resourceArray) {
return putArrayWithServiceResponseAsync(resourceArray).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
return response.body();
}
});
}
/**
 * Put External Resource as an Array.
 *
 * @param resourceArray External Resource as an Array to put
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return an {@link Observable} emitting the {@link ServiceResponse} once the call completes.
 */
public Observable<ServiceResponse<Void>> putArrayWithServiceResponseAsync(List<Resource> resourceArray) {
Validator.validate(resourceArray);
return service.putArray(resourceArray, this.acceptLanguage(), this.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = putArrayDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
// Maps the raw HTTP response for putArray: 200 -> success, otherwise ErrorException.
private ServiceResponse<Void> putArrayDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
return this.restClient().responseBuilderFactory().<Void, ErrorException>newInstance(this.serializerAdapter())
.register(200, new TypeToken<Void>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Get External Resource as an Array.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws ErrorException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the {@code List<FlattenedProductInner>} object if successful.
 */
public List<FlattenedProductInner> getArray() {
return getArrayWithServiceResponseAsync().toBlocking().single().body();
}
/**
 * Get External Resource as an Array.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<FlattenedProductInner>> getArrayAsync(final ServiceCallback<List<FlattenedProductInner>> serviceCallback) {
return ServiceFuture.fromResponse(getArrayWithServiceResponseAsync(), serviceCallback);
}
/**
 * Get External Resource as an Array.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the {@code List<FlattenedProductInner>} object
 */
public Observable<List<FlattenedProductInner>> getArrayAsync() {
return getArrayWithServiceResponseAsync().map(new Func1<ServiceResponse<List<FlattenedProductInner>>, List<FlattenedProductInner>>() {
@Override
public List<FlattenedProductInner> call(ServiceResponse<List<FlattenedProductInner>> response) {
return response.body();
}
});
}
/**
 * Get External Resource as an Array.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the {@code List<FlattenedProductInner>} object wrapped in a {@link ServiceResponse}
 */
public Observable<ServiceResponse<List<FlattenedProductInner>>> getArrayWithServiceResponseAsync() {
return service.getArray(this.acceptLanguage(), this.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<List<FlattenedProductInner>>>>() {
@Override
public Observable<ServiceResponse<List<FlattenedProductInner>>> call(Response<ResponseBody> response) {
try {
ServiceResponse<List<FlattenedProductInner>> clientResponse = getArrayDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
// Maps the raw HTTP response for getArray: 200 -> deserialized list, otherwise ErrorException.
private ServiceResponse<List<FlattenedProductInner>> getArrayDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
return this.restClient().responseBuilderFactory().<List<FlattenedProductInner>, ErrorException>newInstance(this.serializerAdapter())
.register(200, new TypeToken<List<FlattenedProductInner>>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
* Put External Resource as a Dictionary.
*
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws ErrorException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
*/
public void putDictionary() {
putDictionaryWithServiceResponseAsync().toBlocking().single().body();
}
/**
* Put External Resource as a Dictionary.
*
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/
public ServiceFuture<Void> putDictionaryAsync(final ServiceCallback<Void> serviceCallback) {
return ServiceFuture.fromResponse(putDictionaryWithServiceResponseAsync(), serviceCallback);
}
/**
* Put External Resource as a Dictionary.
*
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceResponse} object if successful.
*/
public Observable<Void> putDictionaryAsync() {
return putDictionaryWithServiceResponseAsync().map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
return response.body();
}
});
}
/**
* Put External Resource as a Dictionary.
*
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceResponse} object if successful.
*/
public Observable<ServiceResponse<Void>> putDictionaryWithServiceResponseAsync() {
final Map<String, FlattenedProductInner> resourceDictionary = null;
return service.putDictionary(resourceDictionary, this.acceptLanguage(), this.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = putDictionaryDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
/**
* Put External Resource as a Dictionary.
*
* @param resourceDictionary External Resource as a Dictionary to put
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws ErrorException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
*/
public void putDictionary(Map<String, FlattenedProductInner> resourceDictionary) {
putDictionaryWithServiceResponseAsync(resourceDictionary).toBlocking().single().body();
}
/**
* Put External Resource as a Dictionary.
*
* @param resourceDictionary External Resource as a Dictionary to put
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/
public ServiceFuture<Void> putDictionaryAsync(Map<String, FlattenedProductInner> resourceDictionary, final ServiceCallback<Void> serviceCallback) {
return ServiceFuture.fromResponse(putDictionaryWithServiceResponseAsync(resourceDictionary), serviceCallback);
}
/**
* Put External Resource as a Dictionary.
*
* @param resourceDictionary External Resource as a Dictionary to put
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceResponse} object if successful.
*/
public Observable<Void> putDictionaryAsync(Map<String, FlattenedProductInner> resourceDictionary) {
return putDictionaryWithServiceResponseAsync(resourceDictionary).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
return response.body();
}
});
}
/**
 * Put External Resource as a Dictionary.
 *
 * @param resourceDictionary External Resource as a Dictionary to put
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the {@link ServiceResponse} object
 */
public Observable<ServiceResponse<Void>> putDictionaryWithServiceResponseAsync(Map<String, FlattenedProductInner> resourceDictionary) {
    Validator.validate(resourceDictionary);
    Observable<Response<ResponseBody>> httpCall =
            service.putDictionary(resourceDictionary, this.acceptLanguage(), this.userAgent());
    return httpCall.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
        @Override
        public Observable<ServiceResponse<Void>> call(Response<ResponseBody> rawResponse) {
            try {
                // Translate the raw HTTP response into a typed ServiceResponse.
                return Observable.just(putDictionaryDelegate(rawResponse));
            } catch (Throwable t) {
                return Observable.error(t);
            }
        }
    });
}
/**
 * Converts the raw HTTP response of putDictionary into a typed
 * {@link ServiceResponse}: status 200 maps to an empty {@code Void} body and
 * any other status is deserialized into an {@link ErrorException}.
 *
 * @param response the raw REST response
 * @return the typed service response
 * @throws ErrorException if the server rejected the request
 * @throws IOException if the response body cannot be deserialized
 */
private ServiceResponse<Void> putDictionaryDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
    return this.restClient().responseBuilderFactory().<Void, ErrorException>newInstance(this.serializerAdapter())
        .register(200, new TypeToken<Void>() { }.getType())
        .registerError(ErrorException.class)
        .build(response);
}
/**
 * Get External Resource as a Dictionary.
 * Blocks the calling thread until the service call has completed.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws ErrorException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the Map&lt;String, FlattenedProductInner&gt; object if successful.
 */
public Map<String, FlattenedProductInner> getDictionary() {
    ServiceResponse<Map<String, FlattenedProductInner>> serviceResponse =
            getDictionaryWithServiceResponseAsync().toBlocking().single();
    return serviceResponse.body();
}
/**
 * Get External Resource as a Dictionary.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<Map<String, FlattenedProductInner>> getDictionaryAsync(final ServiceCallback<Map<String, FlattenedProductInner>> serviceCallback) {
    Observable<ServiceResponse<Map<String, FlattenedProductInner>>> responseObservable =
            getDictionaryWithServiceResponseAsync();
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Get External Resource as a Dictionary.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the Map&lt;String, FlattenedProductInner&gt; object
 */
public Observable<Map<String, FlattenedProductInner>> getDictionaryAsync() {
    // Strip the ServiceResponse wrapper and emit only the deserialized body.
    return getDictionaryWithServiceResponseAsync().map(new Func1<ServiceResponse<Map<String, FlattenedProductInner>>, Map<String, FlattenedProductInner>>() {
        @Override
        public Map<String, FlattenedProductInner> call(ServiceResponse<Map<String, FlattenedProductInner>> serviceResponse) {
            return serviceResponse.body();
        }
    });
}
/**
 * Get External Resource as a Dictionary.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the Map&lt;String, FlattenedProductInner&gt; object wrapped in a {@link ServiceResponse}
 */
public Observable<ServiceResponse<Map<String, FlattenedProductInner>>> getDictionaryWithServiceResponseAsync() {
    Observable<Response<ResponseBody>> httpCall =
            service.getDictionary(this.acceptLanguage(), this.userAgent());
    return httpCall.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Map<String, FlattenedProductInner>>>>() {
        @Override
        public Observable<ServiceResponse<Map<String, FlattenedProductInner>>> call(Response<ResponseBody> rawResponse) {
            try {
                // Translate the raw HTTP response into a typed ServiceResponse.
                return Observable.just(getDictionaryDelegate(rawResponse));
            } catch (Throwable t) {
                return Observable.error(t);
            }
        }
    });
}
/**
 * Converts the raw HTTP response of getDictionary into a typed
 * {@link ServiceResponse}: status 200 is deserialized as a
 * {@code Map<String, FlattenedProductInner>}; any other status becomes an
 * {@link ErrorException}.
 *
 * @param response the raw REST response
 * @return the typed service response carrying the dictionary
 * @throws ErrorException if the server rejected the request
 * @throws IOException if the response body cannot be deserialized
 */
private ServiceResponse<Map<String, FlattenedProductInner>> getDictionaryDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
    return this.restClient().responseBuilderFactory().<Map<String, FlattenedProductInner>, ErrorException>newInstance(this.serializerAdapter())
        .register(200, new TypeToken<Map<String, FlattenedProductInner>>() { }.getType())
        .registerError(ErrorException.class)
        .build(response);
}
/**
 * Put External Resource as a ResourceCollection.
 * Blocks the calling thread until the service call has completed. Sends no
 * request payload (the overload taking a ResourceCollectionInner sends one).
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws ErrorException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 */
public void putResourceCollection() {
    ServiceResponse<Void> serviceResponse =
            putResourceCollectionWithServiceResponseAsync().toBlocking().single();
    serviceResponse.body();
}
/**
 * Put External Resource as a ResourceCollection.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<Void> putResourceCollectionAsync(final ServiceCallback<Void> serviceCallback) {
    Observable<ServiceResponse<Void>> responseObservable =
            putResourceCollectionWithServiceResponseAsync();
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Put External Resource as a ResourceCollection.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable that completes when the resource has been put
 */
public Observable<Void> putResourceCollectionAsync() {
    // Strip the ServiceResponse wrapper and surface only the (empty) body.
    return putResourceCollectionWithServiceResponseAsync().map(new Func1<ServiceResponse<Void>, Void>() {
        @Override
        public Void call(ServiceResponse<Void> serviceResponse) {
            return serviceResponse.body();
        }
    });
}
/**
 * Put External Resource as a ResourceCollection.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the {@link ServiceResponse} object
 */
public Observable<ServiceResponse<Void>> putResourceCollectionWithServiceResponseAsync() {
    // Payload-less overload: the body parameter is sent as an explicit null.
    final ResourceCollectionInner resourceComplexObject = null;
    Observable<Response<ResponseBody>> httpCall =
            service.putResourceCollection(resourceComplexObject, this.acceptLanguage(), this.userAgent());
    return httpCall.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
        @Override
        public Observable<ServiceResponse<Void>> call(Response<ResponseBody> rawResponse) {
            try {
                // Translate the raw HTTP response into a typed ServiceResponse.
                return Observable.just(putResourceCollectionDelegate(rawResponse));
            } catch (Throwable t) {
                return Observable.error(t);
            }
        }
    });
}
/**
 * Put External Resource as a ResourceCollection.
 * Blocks the calling thread until the service call has completed.
 *
 * @param resourceComplexObject External Resource as a ResourceCollection to put
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws ErrorException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 */
public void putResourceCollection(ResourceCollectionInner resourceComplexObject) {
    ServiceResponse<Void> serviceResponse =
            putResourceCollectionWithServiceResponseAsync(resourceComplexObject).toBlocking().single();
    serviceResponse.body();
}
/**
 * Put External Resource as a ResourceCollection.
 *
 * @param resourceComplexObject External Resource as a ResourceCollection to put
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<Void> putResourceCollectionAsync(ResourceCollectionInner resourceComplexObject, final ServiceCallback<Void> serviceCallback) {
    Observable<ServiceResponse<Void>> responseObservable =
            putResourceCollectionWithServiceResponseAsync(resourceComplexObject);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Put External Resource as a ResourceCollection.
 *
 * @param resourceComplexObject External Resource as a ResourceCollection to put
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable that completes when the resource has been put
 */
public Observable<Void> putResourceCollectionAsync(ResourceCollectionInner resourceComplexObject) {
    // Strip the ServiceResponse wrapper and surface only the (empty) body.
    return putResourceCollectionWithServiceResponseAsync(resourceComplexObject).map(new Func1<ServiceResponse<Void>, Void>() {
        @Override
        public Void call(ServiceResponse<Void> serviceResponse) {
            return serviceResponse.body();
        }
    });
}
/**
 * Put External Resource as a ResourceCollection.
 *
 * @param resourceComplexObject External Resource as a ResourceCollection to put
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the {@link ServiceResponse} object
 */
public Observable<ServiceResponse<Void>> putResourceCollectionWithServiceResponseAsync(ResourceCollectionInner resourceComplexObject) {
    Validator.validate(resourceComplexObject);
    Observable<Response<ResponseBody>> httpCall =
            service.putResourceCollection(resourceComplexObject, this.acceptLanguage(), this.userAgent());
    return httpCall.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
        @Override
        public Observable<ServiceResponse<Void>> call(Response<ResponseBody> rawResponse) {
            try {
                // Translate the raw HTTP response into a typed ServiceResponse.
                return Observable.just(putResourceCollectionDelegate(rawResponse));
            } catch (Throwable t) {
                return Observable.error(t);
            }
        }
    });
}
/**
 * Converts the raw HTTP response of putResourceCollection into a typed
 * {@link ServiceResponse}: status 200 maps to an empty {@code Void} body and
 * any other status is deserialized into an {@link ErrorException}.
 *
 * @param response the raw REST response
 * @return the typed service response
 * @throws ErrorException if the server rejected the request
 * @throws IOException if the response body cannot be deserialized
 */
private ServiceResponse<Void> putResourceCollectionDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
    return this.restClient().responseBuilderFactory().<Void, ErrorException>newInstance(this.serializerAdapter())
        .register(200, new TypeToken<Void>() { }.getType())
        .registerError(ErrorException.class)
        .build(response);
}
/**
 * Get External Resource as a ResourceCollection.
 * Blocks the calling thread until the service call has completed.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws ErrorException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the ResourceCollectionInner object if successful.
 */
public ResourceCollectionInner getResourceCollection() {
    ServiceResponse<ResourceCollectionInner> serviceResponse =
            getResourceCollectionWithServiceResponseAsync().toBlocking().single();
    return serviceResponse.body();
}
/**
 * Get External Resource as a ResourceCollection.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<ResourceCollectionInner> getResourceCollectionAsync(final ServiceCallback<ResourceCollectionInner> serviceCallback) {
    Observable<ServiceResponse<ResourceCollectionInner>> responseObservable =
            getResourceCollectionWithServiceResponseAsync();
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Get External Resource as a ResourceCollection.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the ResourceCollectionInner object
 */
public Observable<ResourceCollectionInner> getResourceCollectionAsync() {
    // Strip the ServiceResponse wrapper and emit only the deserialized body.
    return getResourceCollectionWithServiceResponseAsync().map(new Func1<ServiceResponse<ResourceCollectionInner>, ResourceCollectionInner>() {
        @Override
        public ResourceCollectionInner call(ServiceResponse<ResourceCollectionInner> serviceResponse) {
            return serviceResponse.body();
        }
    });
}
/**
 * Get External Resource as a ResourceCollection.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the ResourceCollectionInner object wrapped in a {@link ServiceResponse}
 */
public Observable<ServiceResponse<ResourceCollectionInner>> getResourceCollectionWithServiceResponseAsync() {
    Observable<Response<ResponseBody>> httpCall =
            service.getResourceCollection(this.acceptLanguage(), this.userAgent());
    return httpCall.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ResourceCollectionInner>>>() {
        @Override
        public Observable<ServiceResponse<ResourceCollectionInner>> call(Response<ResponseBody> rawResponse) {
            try {
                // Translate the raw HTTP response into a typed ServiceResponse.
                return Observable.just(getResourceCollectionDelegate(rawResponse));
            } catch (Throwable t) {
                return Observable.error(t);
            }
        }
    });
}
/**
 * Converts the raw HTTP response of getResourceCollection into a typed
 * {@link ServiceResponse}: status 200 is deserialized as a
 * {@code ResourceCollectionInner}; any other status becomes an
 * {@link ErrorException}.
 *
 * @param response the raw REST response
 * @return the typed service response carrying the resource collection
 * @throws ErrorException if the server rejected the request
 * @throws IOException if the response body cannot be deserialized
 */
private ServiceResponse<ResourceCollectionInner> getResourceCollectionDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
    return this.restClient().responseBuilderFactory().<ResourceCollectionInner, ErrorException>newInstance(this.serializerAdapter())
        .register(200, new TypeToken<ResourceCollectionInner>() { }.getType())
        .registerError(ErrorException.class)
        .build(response);
}
}
| |
/*
* Copyright 2009 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Tests HBase's multi/batch client API: batched gets, puts and deletes issued
 * against a pre-split table on a two-region-server mini cluster, including
 * scenarios where one region server is aborted mid-test.
 */
public class TestMultiParallel {
  private static final Log LOG = LogFactory.getLog(TestMultiParallel.class);
  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
  private static final byte[] VALUE = Bytes.toBytes("value");
  private static final byte[] QUALIFIER = Bytes.toBytes("qual");
  private static final String FAMILY = "family";
  private static final String TEST_TABLE = "multi_test_table";
  private static final byte[] BYTES_FAMILY = Bytes.toBytes(FAMILY);
  private static final byte[] ONE_ROW = Bytes.toBytes("xxx");
  private static final byte [][] KEYS = makeKeys();

  /** Starts a two-server mini cluster and pre-splits the test table. */
  @BeforeClass public static void beforeClass() throws Exception {
    // Two region servers so abort tests can kill one and keep running.
    UTIL.startMiniCluster(2);
    HTable t = UTIL.createTable(Bytes.toBytes(TEST_TABLE), Bytes.toBytes(FAMILY));
    UTIL.createMultiRegions(t, Bytes.toBytes(FAMILY));
  }

  @AfterClass public static void afterClass() throws IOException {
    UTIL.getMiniHBaseCluster().shutdown();
  }

  /** Restores two live servers (a previous test may have aborted one) and rebalances. */
  @Before public void before() throws IOException {
    LOG.info("before");
    if (UTIL.ensureSomeRegionServersAvailable(2)) {
      // A server had to be started; distribute regions across both again.
      UTIL.getMiniHBaseCluster().getMaster().balance();
    }
    LOG.info("before done");
  }

  /**
   * Builds a deliberately awkward key set to stress the batch sorting and
   * index-mapping code: unequal keys per region, duplicate keys, and keys
   * out of sorted order.
   */
  private static byte[][] makeKeys() {
    byte [][] starterKeys = HBaseTestingUtility.KEYS;
    // Create a "non-uniform" test set with the following characteristics:
    // a) Unequal number of keys per region
    // Don't use integer as a multiple, so that we have a number of keys that is
    // not a multiple of the number of regions
    int numKeys = (int) ((float) starterKeys.length * 10.33F);
    List<byte[]> keys = new ArrayList<byte[]>();
    for (int i = 0; i < numKeys; i++) {
      int kIdx = i % starterKeys.length;
      byte[] k = starterKeys[kIdx];
      byte[] cp = new byte[k.length + 1];
      System.arraycopy(k, 0, cp, 0, k.length);
      // Plain cast replaces the deprecated new Integer(...).byteValue(); same value.
      cp[k.length] = (byte) (i % 256);
      keys.add(cp);
    }
    // b) Same duplicate keys (showing multiple Gets/Puts to the same row, which
    // should work)
    // c) keys are not in sorted order (within a region), to ensure that the
    // sorting code and index mapping doesn't break the functionality
    for (int i = 0; i < 100; i++) {
      int kIdx = i % starterKeys.length;
      byte[] k = starterKeys[kIdx];
      byte[] cp = new byte[k.length + 1];
      System.arraycopy(k, 0, cp, 0, k.length);
      cp[k.length] = (byte) (i % 256);
      keys.add(cp);
    }
    return keys.toArray(new byte [][] {new byte [] {}});
  }

  /** Batched gets must return the same results as the one-at-a-time get API. */
  @Test public void testBatchWithGet() throws Exception {
    LOG.info("test=testBatchWithGet");
    HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE);
    // load test data
    List<Row> puts = constructPutRequests();
    table.batch(puts);
    // create a list of gets and run it
    List<Row> gets = new ArrayList<Row>();
    for (byte[] k : KEYS) {
      Get get = new Get(k);
      get.addColumn(BYTES_FAMILY, QUALIFIER);
      gets.add(get);
    }
    Result[] multiRes = new Result[gets.size()];
    table.batch(gets, multiRes);
    // Same gets using individual call API
    List<Result> singleRes = new ArrayList<Result>();
    for (Row get : gets) {
      singleRes.add(table.get((Get) get));
    }
    // Compare results
    Assert.assertEquals(singleRes.size(), multiRes.length);
    for (int i = 0; i < singleRes.size(); i++) {
      Assert.assertTrue(singleRes.get(i).containsColumn(BYTES_FAMILY, QUALIFIER));
      KeyValue[] singleKvs = singleRes.get(i).raw();
      KeyValue[] multiKvs = multiRes[i].raw();
      for (int j = 0; j < singleKvs.length; j++) {
        Assert.assertEquals(singleKvs[j], multiKvs[j]);
        Assert.assertEquals(0, Bytes.compareTo(singleKvs[j].getValue(), multiKvs[j]
            .getValue()));
      }
    }
  }

  /**
   * A batch mixing a put to a nonexistent family with a valid put must report
   * a per-action error for the former and still apply the latter.
   */
  @Test
  public void testBadFam() throws Exception {
    LOG.info("test=testBadFam");
    HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE);
    List<Row> actions = new ArrayList<Row>();
    Put p = new Put(Bytes.toBytes("row1"));
    p.add(Bytes.toBytes("bad_family"), Bytes.toBytes("qual"), Bytes.toBytes("value"));
    actions.add(p);
    p = new Put(Bytes.toBytes("row2"));
    p.add(BYTES_FAMILY, Bytes.toBytes("qual"), Bytes.toBytes("value"));
    actions.add(p);
    // row1 and row2 should be in the same region.
    Object [] r = new Object[actions.size()];
    try {
      table.batch(actions, r);
      fail();
    } catch (RetriesExhaustedWithDetailsException ex) {
      LOG.debug(ex);
      // good! The failure is a client-side rejection, not a cluster problem.
      assertFalse(ex.mayHaveClusterIssues());
    }
    assertEquals(2, r.length);
    assertTrue(r[0] instanceof Throwable);
    assertTrue(r[1] instanceof Result);
  }

  /**
   * Only run one Multi test with a forced RegionServer abort. Otherwise, the
   * unit tests will take an unnecessarily long time to run.
   *
   * @throws Exception
   */
  @Test public void testFlushCommitsWithAbort() throws Exception {
    LOG.info("test=testFlushCommitsWithAbort");
    doTestFlushCommits(true);
  }

  @Test public void testFlushCommitsNoAbort() throws Exception {
    LOG.info("test=testFlushCommitsNoAbort");
    doTestFlushCommits(false);
  }

  /**
   * Loads data via the client write buffer, optionally aborting a region
   * server mid-way, then validates the data and the surviving server/region
   * layout.
   *
   * @param doAbort whether to abort region server 0 after the first flush
   */
  private void doTestFlushCommits(boolean doAbort) throws Exception {
    // Load the data
    LOG.info("get new table");
    HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE);
    table.setAutoFlush(false);
    table.setWriteBufferSize(10 * 1024 * 1024);
    LOG.info("constructPutRequests");
    List<Row> puts = constructPutRequests();
    for (Row put : puts) {
      table.put((Put) put);
    }
    LOG.info("puts");
    table.flushCommits();
    if (doAbort) {
      LOG.info("Aborted=" + UTIL.getMiniHBaseCluster().abortRegionServer(0));
      // try putting more keys after the abort. same key/qual... just validating
      // no exceptions thrown
      puts = constructPutRequests();
      for (Row put : puts) {
        table.put((Put) put);
      }
      table.flushCommits();
    }
    LOG.info("validating loaded data");
    validateLoadedData(table);
    // Validate server and region count
    List<JVMClusterUtil.RegionServerThread> liveRSs =
        UTIL.getMiniHBaseCluster().getLiveRegionServerThreads();
    int count = 0;
    for (JVMClusterUtil.RegionServerThread t: liveRSs) {
      count++;
      LOG.info("Count=" + count + ", Alive=" + t.getRegionServer());
    }
    LOG.info("Count=" + count);
    Assert.assertEquals("Server count=" + count + ", abort=" + doAbort,
        (doAbort ? 1 : 2), count);
    for (JVMClusterUtil.RegionServerThread t: liveRSs) {
      int regions = t.getRegionServer().getOnlineRegions().size();
      Assert.assertTrue("Count of regions=" + regions, regions > 10);
    }
    LOG.info("done");
  }

  /** Batched puts must succeed, including re-puts after a server abort. */
  @Test public void testBatchWithPut() throws Exception {
    LOG.info("test=testBatchWithPut");
    HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE);
    // put multiple rows using a batch
    List<Row> puts = constructPutRequests();
    Object[] results = table.batch(puts);
    validateSizeAndEmpty(results, KEYS.length);
    // NOTE(review): this branch always runs; it looks like it was meant to be
    // conditional (cf. doTestFlushCommits(doAbort)). Kept as-is to preserve
    // existing test coverage — confirm intent before changing.
    if (true) {
      UTIL.getMiniHBaseCluster().abortRegionServer(0);
      puts = constructPutRequests();
      results = table.batch(puts);
      validateSizeAndEmpty(results, KEYS.length);
    }
    validateLoadedData(table);
  }

  /** Batched deletes must remove every loaded row. */
  @Test public void testBatchWithDelete() throws Exception {
    LOG.info("test=testBatchWithDelete");
    HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE);
    // Load some data
    List<Row> puts = constructPutRequests();
    Object[] results = table.batch(puts);
    validateSizeAndEmpty(results, KEYS.length);
    // Deletes
    List<Row> deletes = new ArrayList<Row>();
    for (int i = 0; i < KEYS.length; i++) {
      Delete delete = new Delete(KEYS[i]);
      delete.deleteFamily(BYTES_FAMILY);
      deletes.add(delete);
    }
    results = table.batch(deletes);
    validateSizeAndEmpty(results, KEYS.length);
    // Get to make sure ...
    for (byte[] k : KEYS) {
      Get get = new Get(k);
      get.addColumn(BYTES_FAMILY, QUALIFIER);
      Assert.assertFalse(table.exists(get));
    }
  }

  /** HTable.delete(List) must apply all deletes and drain the passed list. */
  @Test public void testHTableDeleteWithList() throws Exception {
    LOG.info("test=testHTableDeleteWithList");
    HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE);
    // Load some data
    List<Row> puts = constructPutRequests();
    Object[] results = table.batch(puts);
    validateSizeAndEmpty(results, KEYS.length);
    // Deletes
    ArrayList<Delete> deletes = new ArrayList<Delete>();
    for (int i = 0; i < KEYS.length; i++) {
      Delete delete = new Delete(KEYS[i]);
      delete.deleteFamily(BYTES_FAMILY);
      deletes.add(delete);
    }
    table.delete(deletes);
    // The API removes successfully applied deletes from the caller's list.
    Assert.assertTrue(deletes.isEmpty());
    // Get to make sure ...
    for (byte[] k : KEYS) {
      Get get = new Get(k);
      get.addColumn(BYTES_FAMILY, QUALIFIER);
      Assert.assertFalse(table.exists(get));
    }
  }

  /** Many columns on a single row: batch put them all, then batch get each back. */
  @Test public void testBatchWithManyColsInOneRowGetAndPut() throws Exception {
    LOG.info("test=testBatchWithManyColsInOneRowGetAndPut");
    HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE);
    List<Row> puts = new ArrayList<Row>();
    for (int i = 0; i < 100; i++) {
      Put put = new Put(ONE_ROW);
      byte[] qual = Bytes.toBytes("column" + i);
      put.add(BYTES_FAMILY, qual, VALUE);
      puts.add(put);
    }
    Object[] results = table.batch(puts);
    // validate
    validateSizeAndEmpty(results, 100);
    // get the data back and validate that it is correct
    List<Row> gets = new ArrayList<Row>();
    for (int i = 0; i < 100; i++) {
      Get get = new Get(ONE_ROW);
      byte[] qual = Bytes.toBytes("column" + i);
      get.addColumn(BYTES_FAMILY, qual);
      gets.add(get);
    }
    Object[] multiRes = table.batch(gets);
    int idx = 0;
    for (Object r : multiRes) {
      byte[] qual = Bytes.toBytes("column" + idx);
      validateResult(r, qual, VALUE);
      idx++;
    }
  }

  /** A single batch mixing gets, puts and deletes must honor each action type. */
  @Test public void testBatchWithMixedActions() throws Exception {
    LOG.info("test=testBatchWithMixedActions");
    HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE);
    // Load some data to start
    Object[] results = table.batch(constructPutRequests());
    validateSizeAndEmpty(results, KEYS.length);
    // Batch: get, get, put(new col), delete, get, get of put, get of deleted,
    // put
    List<Row> actions = new ArrayList<Row>();
    byte[] qual2 = Bytes.toBytes("qual2");
    byte[] val2 = Bytes.toBytes("putvalue2");
    // 0 get
    Get get = new Get(KEYS[10]);
    get.addColumn(BYTES_FAMILY, QUALIFIER);
    actions.add(get);
    // 1 get
    get = new Get(KEYS[11]);
    get.addColumn(BYTES_FAMILY, QUALIFIER);
    actions.add(get);
    // 2 put of new column
    Put put = new Put(KEYS[10]);
    put.add(BYTES_FAMILY, qual2, val2);
    actions.add(put);
    // 3 delete
    Delete delete = new Delete(KEYS[20]);
    delete.deleteFamily(BYTES_FAMILY);
    actions.add(delete);
    // 4 get
    get = new Get(KEYS[30]);
    get.addColumn(BYTES_FAMILY, QUALIFIER);
    actions.add(get);
    // There used to be a 'get' of a previous put here, but removed
    // since this API really cannot guarantee order in terms of mixed
    // get/puts.
    // 5 put of new column
    put = new Put(KEYS[40]);
    put.add(BYTES_FAMILY, qual2, val2);
    actions.add(put);
    results = table.batch(actions);
    // Validation
    validateResult(results[0]);
    validateResult(results[1]);
    validateEmpty(results[2]);
    validateEmpty(results[3]);
    validateResult(results[4]);
    validateEmpty(results[5]);
    // validate last put, externally from the batch
    get = new Get(KEYS[40]);
    get.addColumn(BYTES_FAMILY, qual2);
    Result r = table.get(get);
    validateResult(r, qual2, val2);
  }

  // // Helper methods ////

  /** Asserts that {@code r} carries the default qualifier/value pair. */
  private void validateResult(Object r) {
    validateResult(r, QUALIFIER, VALUE);
  }

  /** Asserts that {@code r1} is a Result containing {@code val} under {@code qual}. */
  private void validateResult(Object r1, byte[] qual, byte[] val) {
    Result r = (Result)r1;
    Assert.assertTrue(r.containsColumn(BYTES_FAMILY, qual));
    Assert.assertEquals(0, Bytes.compareTo(val, r.getValue(BYTES_FAMILY, qual)));
  }

  /** Builds one Put per entry in KEYS, all writing the default qualifier/value. */
  private List<Row> constructPutRequests() {
    List<Row> puts = new ArrayList<Row>(KEYS.length);
    for (byte[] k : KEYS) {
      Put put = new Put(k);
      put.add(BYTES_FAMILY, QUALIFIER, VALUE);
      puts.add(put);
    }
    return puts;
  }

  /** Reads every key back individually and checks the stored value. */
  private void validateLoadedData(HTable table) throws IOException {
    // get the data back and validate that it is correct
    for (byte[] k : KEYS) {
      LOG.info("Assert=" + Bytes.toString(k));
      Get get = new Get(k);
      get.addColumn(BYTES_FAMILY, QUALIFIER);
      Result r = table.get(get);
      Assert.assertTrue(r.containsColumn(BYTES_FAMILY, QUALIFIER));
      Assert.assertEquals(0, Bytes.compareTo(VALUE, r
          .getValue(BYTES_FAMILY, QUALIFIER)));
    }
  }

  /** Asserts that a batch slot holds a non-null, rowless, empty Result. */
  private void validateEmpty(Object r1) {
    Result result = (Result)r1;
    Assert.assertNotNull(result);
    Assert.assertNull(result.getRow());
    Assert.assertEquals(0, result.raw().length);
  }

  /** Asserts the batch returned {@code expectedSize} slots, each an empty Result. */
  private void validateSizeAndEmpty(Object[] results, int expectedSize) {
    // Validate got back the same number of Result objects, all empty
    Assert.assertEquals(expectedSize, results.length);
    for (Object result : results) {
      validateEmpty(result);
    }
  }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: grpc/block_master.proto
package alluxio.grpc;
/**
* Protobuf type {@code alluxio.grpc.block.GetWorkerLostStoragePOptions}
*/
public final class GetWorkerLostStoragePOptions extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:alluxio.grpc.block.GetWorkerLostStoragePOptions)
GetWorkerLostStoragePOptionsOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use GetWorkerLostStoragePOptions.newBuilder() to construct.
  private GetWorkerLostStoragePOptions(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor for the shared default instance; this message declares
  // no fields, so there is nothing to initialize.
  private GetWorkerLostStoragePOptions() {
  }
  /** Returns fields that were present on the wire but unknown to this schema. */
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Parses a serialized message from the stream. Since this message declares
  // no fields, every non-zero tag is preserved verbatim as an unknown field.
  private GetWorkerLostStoragePOptions(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks end of input.
            done = true;
            break;
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      // Always attach whatever was parsed so far, even on failure.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  /** Returns the protobuf descriptor for this message type. */
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return alluxio.grpc.BlockMasterProto.internal_static_alluxio_grpc_block_GetWorkerLostStoragePOptions_descriptor;
  }
  // Binds the generated accessors to the descriptor's (empty) field list.
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return alluxio.grpc.BlockMasterProto.internal_static_alluxio_grpc_block_GetWorkerLostStoragePOptions_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            alluxio.grpc.GetWorkerLostStoragePOptions.class, alluxio.grpc.GetWorkerLostStoragePOptions.Builder.class);
  }
  // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  /** Always initialized: this message has no required fields. */
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /** Serializes this message; only unknown fields exist, so only they are written. */
  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    unknownFields.writeTo(output);
  }
  /** Returns (and memoizes) the serialized size; -1 in memoizedSize means "not computed". */
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /** Two instances are equal iff their unknown-field sets match (no declared fields). */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof alluxio.grpc.GetWorkerLostStoragePOptions)) {
      return super.equals(obj);
    }
    alluxio.grpc.GetWorkerLostStoragePOptions other = (alluxio.grpc.GetWorkerLostStoragePOptions) obj;
    boolean result = true;
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }
  /** Hash derived from the descriptor and unknown fields; memoized (0 = not computed). */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Standard protobuf parse entry points. All overloads delegate to PARSER or
  // the GeneratedMessageV3 helpers; they differ only in the input container.
  // ---------------------------------------------------------------------------
  public static alluxio.grpc.GetWorkerLostStoragePOptions parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static alluxio.grpc.GetWorkerLostStoragePOptions parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static alluxio.grpc.GetWorkerLostStoragePOptions parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static alluxio.grpc.GetWorkerLostStoragePOptions parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static alluxio.grpc.GetWorkerLostStoragePOptions parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static alluxio.grpc.GetWorkerLostStoragePOptions parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static alluxio.grpc.GetWorkerLostStoragePOptions parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static alluxio.grpc.GetWorkerLostStoragePOptions parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a length-prefixed message from the stream.
  public static alluxio.grpc.GetWorkerLostStoragePOptions parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static alluxio.grpc.GetWorkerLostStoragePOptions parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static alluxio.grpc.GetWorkerLostStoragePOptions parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static alluxio.grpc.GetWorkerLostStoragePOptions parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Builder factory methods; all new builders derive from the default instance.
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Returns a builder pre-populated from {@code prototype}. */
  public static Builder newBuilder(alluxio.grpc.GetWorkerLostStoragePOptions prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    // The default instance yields a fresh builder; anything else is merged in.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code alluxio.grpc.block.GetWorkerLostStoragePOptions}
   *
   * <p>Generated builder. The message declares no fields of its own, so this
   * builder only manages unknown fields; every other method delegates straight
   * to the protobuf runtime. Do not edit by hand — regenerate from the .proto.
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:alluxio.grpc.block.GetWorkerLostStoragePOptions)
      alluxio.grpc.GetWorkerLostStoragePOptionsOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return alluxio.grpc.BlockMasterProto.internal_static_alluxio_grpc_block_GetWorkerLostStoragePOptions_descriptor;
    }
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return alluxio.grpc.BlockMasterProto.internal_static_alluxio_grpc_block_GetWorkerLostStoragePOptions_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              alluxio.grpc.GetWorkerLostStoragePOptions.class, alluxio.grpc.GetWorkerLostStoragePOptions.Builder.class);
    }
    // Construct using alluxio.grpc.GetWorkerLostStoragePOptions.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Effectively a no-op: the message has no fields, so there are no nested
    // field builders to eagerly initialize even when alwaysUseFieldBuilders is set.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      return this;
    }
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return alluxio.grpc.BlockMasterProto.internal_static_alluxio_grpc_block_GetWorkerLostStoragePOptions_descriptor;
    }
    public alluxio.grpc.GetWorkerLostStoragePOptions getDefaultInstanceForType() {
      return alluxio.grpc.GetWorkerLostStoragePOptions.getDefaultInstance();
    }
    // build() verifies initialization; trivially satisfied since there are no
    // required fields (see isInitialized()).
    public alluxio.grpc.GetWorkerLostStoragePOptions build() {
      alluxio.grpc.GetWorkerLostStoragePOptions result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    public alluxio.grpc.GetWorkerLostStoragePOptions buildPartial() {
      alluxio.grpc.GetWorkerLostStoragePOptions result = new alluxio.grpc.GetWorkerLostStoragePOptions(this);
      onBuilt();
      return result;
    }
    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof alluxio.grpc.GetWorkerLostStoragePOptions) {
        return mergeFrom((alluxio.grpc.GetWorkerLostStoragePOptions)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Only unknown fields can differ between two instances of this message type.
    public Builder mergeFrom(alluxio.grpc.GetWorkerLostStoragePOptions other) {
      if (other == alluxio.grpc.GetWorkerLostStoragePOptions.getDefaultInstance()) return this;
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    // Always true: the message declares no required fields.
    public final boolean isInitialized() {
      return true;
    }
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      alluxio.grpc.GetWorkerLostStoragePOptions parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Preserve whatever was parsed before the failure so the finally block
        // can still merge it, then rethrow as the underlying IOException.
        parsedMessage = (alluxio.grpc.GetWorkerLostStoragePOptions) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:alluxio.grpc.block.GetWorkerLostStoragePOptions)
  }
  // @@protoc_insertion_point(class_scope:alluxio.grpc.block.GetWorkerLostStoragePOptions)
  // Shared immutable default (all-fields-unset) instance for this message type.
  private static final alluxio.grpc.GetWorkerLostStoragePOptions DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new alluxio.grpc.GetWorkerLostStoragePOptions();
  }
  /** Returns the singleton default instance. Generated code. */
  public static alluxio.grpc.GetWorkerLostStoragePOptions getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Deprecated in generated code: callers should use parser() / getParserForType()
  // instead of touching this field directly.
  @java.lang.Deprecated public static final com.google.protobuf.Parser<GetWorkerLostStoragePOptions>
      PARSER = new com.google.protobuf.AbstractParser<GetWorkerLostStoragePOptions>() {
    public GetWorkerLostStoragePOptions parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new GetWorkerLostStoragePOptions(input, extensionRegistry);
    }
  };
  /** Returns the parser for this message type. Generated code. */
  public static com.google.protobuf.Parser<GetWorkerLostStoragePOptions> parser() {
    return PARSER;
  }
  /** Returns the parser for this instance's type. Generated code. */
  @java.lang.Override
  public com.google.protobuf.Parser<GetWorkerLostStoragePOptions> getParserForType() {
    return PARSER;
  }
  /** Returns the shared default instance for this type. Generated code. */
  public alluxio.grpc.GetWorkerLostStoragePOptions getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
package org.apereo.cas.config;
import com.google.common.collect.Lists;
import org.apereo.cas.CentralAuthenticationService;
import org.apereo.cas.authentication.AuthenticationContextValidator;
import org.apereo.cas.authentication.AuthenticationHandler;
import org.apereo.cas.authentication.AuthenticationSystemSupport;
import org.apereo.cas.authentication.MultifactorTriggerSelectionStrategy;
import org.apereo.cas.authentication.principal.DefaultPrincipalFactory;
import org.apereo.cas.authentication.principal.PrincipalFactory;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.services.ServicesManager;
import org.apereo.cas.support.openid.authentication.handler.support.OpenIdCredentialsAuthenticationHandler;
import org.apereo.cas.support.openid.authentication.principal.OpenIdPrincipalResolver;
import org.apereo.cas.support.openid.authentication.principal.OpenIdService;
import org.apereo.cas.support.openid.authentication.principal.OpenIdServiceFactory;
import org.apereo.cas.support.openid.web.OpenIdProviderController;
import org.apereo.cas.support.openid.web.flow.OpenIdSingleSignOnAction;
import org.apereo.cas.support.openid.web.mvc.OpenIdValidateController;
import org.apereo.cas.support.openid.web.mvc.SmartOpenIdController;
import org.apereo.cas.support.openid.web.support.DefaultOpenIdUserNameExtractor;
import org.apereo.cas.support.openid.web.support.OpenIdPostUrlHandlerMapping;
import org.apereo.cas.support.openid.web.support.OpenIdUserNameExtractor;
import org.apereo.cas.ticket.UniqueTicketIdGenerator;
import org.apereo.cas.ticket.proxy.ProxyHandler;
import org.apereo.cas.ticket.registry.TicketRegistry;
import org.apereo.cas.validation.ValidationSpecification;
import org.apereo.cas.web.AbstractDelegateController;
import org.apereo.cas.web.DelegatingController;
import org.apereo.cas.web.support.ArgumentExtractor;
import org.apereo.services.persondir.IPersonAttributeDao;
import org.openid4java.server.InMemoryServerAssociationStore;
import org.openid4java.server.ServerManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.View;
import org.springframework.web.util.CookieGenerator;
import org.springframework.webflow.execution.Action;
import javax.annotation.PostConstruct;
import java.util.Map;
import java.util.Properties;
/**
 * This is {@link OpenIdConfiguration}.
 *
 * <p>Spring configuration that wires CAS's OpenID protocol support: the
 * openid4java {@link ServerManager}, the controllers that handle association
 * ("smart" mode) and validation requests, the OpenID credentials
 * authentication handler / principal resolver pair, the service factory, and
 * the webflow single sign-on action. {@link #initializeRootApplicationContext()}
 * registers these components into application-wide maps after construction.
 *
 * @author Misagh Moayyed
 * @since 5.0.0
 */
@Configuration("openidConfiguration")
@EnableConfigurationProperties(CasConfigurationProperties.class)
public class OpenIdConfiguration {
    private static final Logger LOGGER = LoggerFactory.getLogger(OpenIdConfiguration.class);
    @Autowired
    @Qualifier("warnCookieGenerator")
    private CookieGenerator warnCookieGenerator;
    @Autowired
    @Qualifier("cas3ServiceJsonView")
    private View cas3ServiceJsonView;
    @Autowired
    @Qualifier("casOpenIdServiceSuccessView")
    private View casOpenIdServiceSuccessView;
    @Autowired
    @Qualifier("casOpenIdServiceFailureView")
    private View casOpenIdServiceFailureView;
    @Autowired
    @Qualifier("casOpenIdAssociationSuccessView")
    private View casOpenIdAssociationSuccessView;
    @Autowired
    @Qualifier("proxy20Handler")
    private ProxyHandler proxy20Handler;
    @Autowired
    @Qualifier("attributeRepository")
    private IPersonAttributeDao attributeRepository;
    @Autowired
    @Qualifier("serviceTicketUniqueIdGenerator")
    private UniqueTicketIdGenerator serviceTicketUniqueIdGenerator;
    @Autowired
    private CasConfigurationProperties casProperties;
    @Autowired
    @Qualifier("ticketRegistry")
    private TicketRegistry ticketRegistry;
    @Autowired
    @Qualifier("centralAuthenticationService")
    private CentralAuthenticationService centralAuthenticationService;
    @Autowired
    @Qualifier("authenticationContextValidator")
    private AuthenticationContextValidator authenticationContextValidator;
    @Autowired
    @Qualifier("defaultAuthenticationSystemSupport")
    private AuthenticationSystemSupport authenticationSystemSupport;
    @Autowired
    @Qualifier("cas20WithoutProxyProtocolValidationSpecification")
    private ValidationSpecification cas20WithoutProxyProtocolValidationSpecification;
    @Autowired
    @Qualifier("defaultArgumentExtractor")
    private ArgumentExtractor argumentExtractor;
    @Autowired
    @Qualifier("defaultMultifactorTriggerSelectionStrategy")
    private MultifactorTriggerSelectionStrategy multifactorTriggerSelectionStrategy;
    @Autowired
    @Qualifier("servicesManager")
    private ServicesManager servicesManager;
    // NOTE(review): raw Map — presumably Map<AuthenticationHandler, PrincipalResolver>
    // judging by initializeRootApplicationContext(); confirm the registered bean's
    // parameterization before adding generics here.
    @Autowired
    @Qualifier("authenticationHandlersResolvers")
    private Map authenticationHandlersResolvers;
    // NOTE(review): raw Map — presumably Map<String, UniqueTicketIdGenerator>; see note above.
    @Autowired
    @Qualifier("uniqueIdGeneratorsMap")
    private Map uniqueIdGeneratorsMap;
    /**
     * Controller that delegates each request to the first of the association
     * ("smart") and validation controllers that can handle it.
     */
    @Bean
    public DelegatingController openidDelegatingController() {
        final DelegatingController controller = new DelegatingController();
        controller.setDelegates(Lists.newArrayList(smartOpenIdAssociationController(), openIdValidateController()));
        return controller;
    }
    /** Controller handling OpenID association requests via the shared {@link #serverManager()}. */
    @Bean
    public AbstractDelegateController smartOpenIdAssociationController() {
        final SmartOpenIdController b = new SmartOpenIdController();
        b.setServerManager(serverManager());
        b.setSuccessView(this.casOpenIdAssociationSuccessView);
        return b;
    }
    /** Controller validating OpenID authentication responses (CAS 2.0 without proxy spec). */
    @Bean
    public AbstractDelegateController openIdValidateController() {
        final OpenIdValidateController c = new OpenIdValidateController();
        c.setServerManager(serverManager());
        c.setValidationSpecification(this.cas20WithoutProxyProtocolValidationSpecification);
        c.setSuccessView(casOpenIdServiceSuccessView);
        c.setFailureView(casOpenIdServiceFailureView);
        c.setProxyHandler(proxy20Handler);
        c.setAuthenticationSystemSupport(authenticationSystemSupport);
        c.setServicesManager(servicesManager);
        c.setCentralAuthenticationService(centralAuthenticationService);
        c.setArgumentExtractor(argumentExtractor);
        c.setMultifactorTriggerSelectionStrategy(multifactorTriggerSelectionStrategy);
        c.setAuthenticationContextValidator(authenticationContextValidator);
        c.setJsonView(cas3ServiceJsonView);
        c.setAuthnContextAttribute(casProperties.getAuthn().getMfa().getAuthenticationContextAttribute());
        return c;
    }
    /**
     * openid4java server manager using the CAS login URL as the OP endpoint and
     * an in-memory association store (associations are lost on restart).
     */
    @RefreshScope
    @Bean
    public ServerManager serverManager() {
        final ServerManager manager = new ServerManager();
        manager.setOPEndpointUrl(casProperties.getServer().getLoginUrl());
        manager.setEnforceRpId(casProperties.getAuthn().getOpenid().isEnforceRpId());
        manager.setSharedAssociations(new InMemoryServerAssociationStore());
        LOGGER.info("Creating openid server manager with OP endpoint {}", casProperties.getServer().getLoginUrl());
        return manager;
    }
    /** Authentication handler for OpenID credentials, backed by the ticket registry. */
    @Bean
    public AuthenticationHandler openIdCredentialsAuthenticationHandler() {
        final OpenIdCredentialsAuthenticationHandler h = new OpenIdCredentialsAuthenticationHandler();
        h.setTicketRegistry(this.ticketRegistry);
        h.setPrincipalFactory(openidPrincipalFactory());
        h.setServicesManager(servicesManager);
        return h;
    }
    /** Principal resolver configured from the openid principal properties. */
    @Bean
    public OpenIdPrincipalResolver openIdPrincipalResolver() {
        final OpenIdPrincipalResolver r = new OpenIdPrincipalResolver();
        r.setAttributeRepository(attributeRepository);
        r.setPrincipalAttributeName(casProperties.getAuthn().getOpenid().getPrincipal().getPrincipalAttribute());
        r.setReturnNullIfNoAttributes(casProperties.getAuthn().getOpenid().getPrincipal().isReturnNull());
        r.setPrincipalFactory(openidPrincipalFactory());
        return r;
    }
    /** Principal factory shared by the handler, resolver and SSO action beans. */
    @Bean
    public PrincipalFactory openidPrincipalFactory() {
        return new DefaultPrincipalFactory();
    }
    /** Service factory recognizing OpenID services under {@code <prefix>/openid}. */
    @Bean
    @RefreshScope
    public OpenIdServiceFactory openIdServiceFactory() {
        final OpenIdServiceFactory f = new OpenIdServiceFactory();
        f.setOpenIdPrefixUrl(casProperties.getServer().getPrefix().concat("/openid"));
        return f;
    }
    @Bean
    @RefreshScope
    public OpenIdProviderController openIdProviderController() {
        return new OpenIdProviderController();
    }
    /** Webflow action performing OpenID single sign-on. */
    @Bean
    public Action openIdSingleSignOnAction() {
        final OpenIdSingleSignOnAction a = new OpenIdSingleSignOnAction();
        a.setExtractor(defaultOpenIdUserNameExtractor());
        a.setAuthenticationSystemSupport(authenticationSystemSupport);
        a.setCentralAuthenticationService(centralAuthenticationService);
        a.setPrincipalFactory(openidPrincipalFactory());
        a.setWarnCookieGenerator(warnCookieGenerator);
        return a;
    }
    @Bean
    public OpenIdUserNameExtractor defaultOpenIdUserNameExtractor() {
        return new DefaultOpenIdUserNameExtractor();
    }
    /** Maps POSTs to {@code /login} onto the delegating OpenID controller (order 1). */
    @Bean
    public OpenIdPostUrlHandlerMapping openIdPostUrlHandlerMapping() {
        final OpenIdPostUrlHandlerMapping m = new OpenIdPostUrlHandlerMapping();
        m.setOrder(1);
        final Properties mappings = new Properties();
        mappings.put("/login", openidDelegatingController());
        m.setMappings(mappings);
        return m;
    }
    /**
     * Registers the OpenID handler/resolver pair, ticket id generator, and
     * service factory into the application-wide registries once this
     * configuration class has been constructed. The service factory is inserted
     * at index 0 so OpenID services are matched before other service types.
     */
    @PostConstruct
    protected void initializeRootApplicationContext() {
        authenticationHandlersResolvers.put(openIdCredentialsAuthenticationHandler(), openIdPrincipalResolver());
        uniqueIdGeneratorsMap.put(OpenIdService.class.getCanonicalName(), this.serviceTicketUniqueIdGenerator);
        this.argumentExtractor.getServiceFactories().add(0, openIdServiceFactory());
    }
}
| |
/*
* Copyright 2019 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.collector.config;
import com.navercorp.pinpoint.common.util.Assert;
import com.navercorp.pinpoint.grpc.server.ServerOption;
import java.util.Objects;
import java.util.Properties;
/**
 * Immutable configuration for the stat data receiver group, covering the
 * legacy TCP/UDP receivers and the gRPC receiver. Values are read from
 * {@code collector.receiver.stat.*} / {@code collector.receiver.grpc.stat.*}
 * properties, with fallbacks to {@link DeprecatedConfiguration} for the older
 * UDP/worker property names. All validation happens in the constructor.
 *
 * @author Taejin Koo
 */
public final class StatReceiverConfiguration implements DataReceiverGroupConfiguration {
    private static final String PREFIX = "collector.receiver.stat";
    private static final String GRPC_PREFIX = "collector.receiver.grpc.stat";
    private static final String GRPC_ENABLE = GRPC_PREFIX + ".enable";
    private static final String GRPC_BIND_IP = GRPC_PREFIX + ".ip";
    private static final String GRPC_BIND_PORT = GRPC_PREFIX + ".port";
    private static final String GRPC_SERVER_EXECUTOR_THREAD_SIZE = GRPC_PREFIX + ".server.executor.thread.size";
    private static final String GRPC_SERVER_EXECUTOR_QUEUE_SIZE = GRPC_PREFIX + ".server.executor.queue.size";
    private static final String GRPC_SERVER_EXECUTOR_MONITOR_ENABLE = GRPC_PREFIX + ".server.executor.monitor.enable";
    private static final String GRPC_WORKER_EXECUTOR_THREAD_SIZE = GRPC_PREFIX + ".worker.executor.thread.size";
    private static final String GRPC_WORKER_EXECUTOR_QUEUE_SIZE = GRPC_PREFIX + ".worker.executor.queue.size";
    private static final String GRPC_WORKER_EXECUTOR_MONITOR_ENABLE = GRPC_PREFIX + ".worker.executor.monitor.enable";
    private static final String GRPC_STREAM_SCHEDULER_THREAD_SIZE = GRPC_PREFIX + ".stream.scheduler.thread.size";
    private static final String GRPC_CALL_INIT_REQUEST_COUNT = GRPC_PREFIX + ".stream.call.init.request.count";
    private static final String GRPC_STREAM_SCHEDULER_PERIOD_MILLIS = GRPC_PREFIX + ".stream.scheduler.period.millis";
    private static final String GRPC_STREAM_SCHEDULER_RECOVERY_MESSAGE_COUNT = GRPC_PREFIX + ".stream.scheduler.recovery.message.count";
    private static final String TCP_ENABLE = PREFIX + ".tcp";
    private static final String TCP_BIND_IP = PREFIX + ".tcp.ip";
    private static final String TCP_BIND_PORT = PREFIX + ".tcp.port";
    private static final String UDP_ENABLE = PREFIX + ".udp";
    private static final String UDP_BIND_IP = PREFIX + ".udp.ip";
    private static final String UDP_BIND_PORT = PREFIX + ".udp.port";
    private static final String UDP_RECEIVE_BUFFER_SIZE = PREFIX + ".udp.receiveBufferSize";
    private static final String WORKER_THREAD_SIZE = PREFIX + ".worker.threadSize";
    private static final String WORKER_QUEUE_SIZE = PREFIX + ".worker.queueSize";
    private static final String WORKER_MONITOR_ENABLE = PREFIX + ".worker.monitor";
    // Legacy TCP/UDP receiver settings
    private final boolean isTcpEnable;
    private final String tcpBindIp;
    private final int tcpBindPort;
    private final boolean isUdpEnable;
    private final String udpBindIp;
    private final int udpBindPort;
    private final int udpReceiveBufferSize;
    private final int workerThreadSize;
    private final int workerQueueSize;
    private final boolean workerMonitorEnable;
    // gRPC receiver settings
    private final boolean isGrpcEnable;
    private final String grpcBindIp;
    private final int grpcBindPort;
    private final int grpcServerExecutorThreadSize;
    private final int grpcServerExecutorQueueSize;
    private final boolean grpcServerExecutorMonitorEnable;
    private final int grpcWorkerExecutorThreadSize;
    private final int grpcWorkerExecutorQueueSize;
    private final boolean grpcWorkerExecutorMonitorEnable;
    private final int grpcStreamSchedulerThreadSize;
    private final int grpcStreamCallInitRequestCount;
    private final int grpcStreamSchedulerPeriodMillis;
    private final int grpcStreamSchedulerRecoveryMessageCount;
    private final ServerOption grpcServerOption;
    /**
     * Reads every setting from {@code properties}, falling back to
     * {@code deprecatedConfiguration} for old UDP/worker property names, then
     * calls {@link #validate()}. Fails fast (IllegalArgumentException /
     * NullPointerException via Assert and Objects) on invalid values.
     */
    public StatReceiverConfiguration(Properties properties, DeprecatedConfiguration deprecatedConfiguration) {
        Objects.requireNonNull(properties, "properties");
        Objects.requireNonNull(deprecatedConfiguration, "deprecatedConfiguration");
        this.isTcpEnable = CollectorConfiguration.readBoolean(properties, TCP_ENABLE);
        this.tcpBindIp = CollectorConfiguration.readString(properties, TCP_BIND_IP, CollectorConfiguration.DEFAULT_LISTEN_IP);
        this.tcpBindPort = CollectorConfiguration.readInt(properties, TCP_BIND_PORT, -1);
        this.isUdpEnable = isUdpEnable(properties, deprecatedConfiguration, true);
        this.udpBindIp = getUdpBindIp(properties, deprecatedConfiguration, CollectorConfiguration.DEFAULT_LISTEN_IP);
        this.udpBindPort = getUdpBindPort(properties, deprecatedConfiguration, 9995);
        this.udpReceiveBufferSize = getUdpReceiveBufferSize(properties, deprecatedConfiguration, 1024 * 4096);
        this.workerThreadSize = getWorkerThreadSize(properties, deprecatedConfiguration, 128);
        Assert.isTrue(workerThreadSize > 0, "workerThreadSize must be greater than 0");
        this.workerQueueSize = getWorkerQueueSize(properties, deprecatedConfiguration, 1024);
        Assert.isTrue(workerQueueSize > 0, "workerQueueSize must be greater than 0");
        this.workerMonitorEnable = isWorkerThreadMonitorEnable(properties, deprecatedConfiguration, false);
        // gRPC
        this.isGrpcEnable = CollectorConfiguration.readBoolean(properties, GRPC_ENABLE);
        this.grpcBindIp = CollectorConfiguration.readString(properties, GRPC_BIND_IP, CollectorConfiguration.DEFAULT_LISTEN_IP);
        this.grpcBindPort = CollectorConfiguration.readInt(properties, GRPC_BIND_PORT, 9992);
        // Server executor
        this.grpcServerExecutorThreadSize = CollectorConfiguration.readInt(properties, GRPC_SERVER_EXECUTOR_THREAD_SIZE, 128);
        Assert.isTrue(grpcServerExecutorThreadSize > 0, "grpcServerExecutorThreadSize must be greater than 0");
        this.grpcServerExecutorQueueSize = CollectorConfiguration.readInt(properties, GRPC_SERVER_EXECUTOR_QUEUE_SIZE, 1024 * 5);
        Assert.isTrue(grpcServerExecutorQueueSize > 0, "grpcServerExecutorQueueSize must be greater than 0");
        this.grpcServerExecutorMonitorEnable = CollectorConfiguration.readBoolean(properties, GRPC_SERVER_EXECUTOR_MONITOR_ENABLE);
        // Work executor
        this.grpcWorkerExecutorThreadSize = CollectorConfiguration.readInt(properties, GRPC_WORKER_EXECUTOR_THREAD_SIZE, 128);
        Assert.isTrue(grpcWorkerExecutorThreadSize > 0, "grpcWorkerExecutorThreadSize must be greater than 0");
        this.grpcWorkerExecutorQueueSize = CollectorConfiguration.readInt(properties, GRPC_WORKER_EXECUTOR_QUEUE_SIZE, 1024 * 5);
        Assert.isTrue(grpcWorkerExecutorQueueSize > 0, "grpcWorkerExecutorQueueSize must be greater than 0");
        this.grpcWorkerExecutorMonitorEnable = CollectorConfiguration.readBoolean(properties, GRPC_WORKER_EXECUTOR_MONITOR_ENABLE);
        this.grpcStreamSchedulerThreadSize = CollectorConfiguration.readInt(properties, GRPC_STREAM_SCHEDULER_THREAD_SIZE, 1);
        Assert.isTrue(grpcStreamSchedulerThreadSize > 0, "grpcStreamSchedulerThreadSize must be greater than 0");
        this.grpcStreamSchedulerPeriodMillis = CollectorConfiguration.readInt(properties, GRPC_STREAM_SCHEDULER_PERIOD_MILLIS, 1000);
        this.grpcStreamCallInitRequestCount = CollectorConfiguration.readInt(properties, GRPC_CALL_INIT_REQUEST_COUNT, 64);
        this.grpcStreamSchedulerRecoveryMessageCount = CollectorConfiguration.readInt(properties, GRPC_STREAM_SCHEDULER_RECOVERY_MESSAGE_COUNT, 10);
        // Server option
        final ServerOption.Builder serverOptionBuilder = GrpcPropertiesServerOptionBuilder.newBuilder(properties, GRPC_PREFIX);
        this.grpcServerOption = serverOptionBuilder.build();
        validate();
    }
    /**
     * Cross-field validation: at least one of TCP/UDP must be enabled, and an
     * enabled transport must have a bind IP, a positive port, and (for UDP) a
     * positive receive buffer size. gRPC settings are validated inline above.
     */
    private void validate() {
        Assert.isTrue(isTcpEnable || isUdpEnable, "statReceiver does not allow tcp and udp disable");
        if (isTcpEnable) {
            Objects.requireNonNull(tcpBindIp, "tcpBindIp");
            Assert.isTrue(tcpBindPort > 0, "tcpBindPort must be greater than 0");
        }
        if (isUdpEnable) {
            Objects.requireNonNull(udpBindIp, "udpBindIp");
            Assert.isTrue(udpBindPort > 0, "udpBindPort must be greater than 0");
            Assert.isTrue(udpReceiveBufferSize > 0, "udpReceiveBufferSize must be greater than 0");
        }
    }
    // NOTE(review): unlike its siblings, this never consults deprecatedConfiguration —
    // presumably there is no deprecated property for the UDP-enable flag; confirm.
    private boolean isUdpEnable(Properties properties, DeprecatedConfiguration deprecatedConfiguration, boolean defaultValue) {
        if (properties.containsKey(UDP_ENABLE)) {
            return CollectorConfiguration.readBoolean(properties, UDP_ENABLE);
        }
        return defaultValue;
    }
    // Resolution order for the getters below: current property, then the
    // deprecated property (if set), then the hard-coded default.
    private String getUdpBindIp(Properties properties, DeprecatedConfiguration deprecatedConfiguration, String defaultValue) {
        if (properties.containsKey(UDP_BIND_IP)) {
            return CollectorConfiguration.readString(properties, UDP_BIND_IP, null);
        }
        if (deprecatedConfiguration.isSetUdpStatListenIp()) {
            return deprecatedConfiguration.getUdpStatListenIp();
        }
        return defaultValue;
    }
    private int getUdpBindPort(Properties properties, DeprecatedConfiguration deprecatedConfiguration, int defaultValue) {
        if (properties.containsKey(UDP_BIND_PORT)) {
            return CollectorConfiguration.readInt(properties, UDP_BIND_PORT, -1);
        }
        if (deprecatedConfiguration.isSetUdpStatListenPort()) {
            return deprecatedConfiguration.getUdpStatListenPort();
        }
        return defaultValue;
    }
    private int getUdpReceiveBufferSize(Properties properties, DeprecatedConfiguration deprecatedConfiguration, int defaultValue) {
        if (properties.containsKey(UDP_RECEIVE_BUFFER_SIZE)) {
            return CollectorConfiguration.readInt(properties, UDP_RECEIVE_BUFFER_SIZE, -1);
        }
        if (deprecatedConfiguration.isSetUdpStatSocketReceiveBufferSize()) {
            return deprecatedConfiguration.getUdpStatSocketReceiveBufferSize();
        }
        return defaultValue;
    }
    private int getWorkerThreadSize(Properties properties, DeprecatedConfiguration deprecatedConfiguration, int defaultValue) {
        if (properties.containsKey(WORKER_THREAD_SIZE)) {
            return CollectorConfiguration.readInt(properties, WORKER_THREAD_SIZE, -1);
        }
        if (deprecatedConfiguration.isSetUdpStatWorkerThread()) {
            return deprecatedConfiguration.getUdpStatWorkerThread();
        }
        return defaultValue;
    }
    private int getWorkerQueueSize(Properties properties, DeprecatedConfiguration deprecatedConfiguration, int defaultValue) {
        if (properties.containsKey(WORKER_QUEUE_SIZE)) {
            return CollectorConfiguration.readInt(properties, WORKER_QUEUE_SIZE, -1);
        }
        if (deprecatedConfiguration.isSetUdpStatWorkerQueueSize()) {
            return deprecatedConfiguration.getUdpStatWorkerQueueSize();
        }
        return defaultValue;
    }
    private boolean isWorkerThreadMonitorEnable(Properties properties, DeprecatedConfiguration deprecatedConfiguration, boolean defaultValue) {
        if (properties.containsKey(WORKER_MONITOR_ENABLE)) {
            return CollectorConfiguration.readBoolean(properties, WORKER_MONITOR_ENABLE);
        }
        if (deprecatedConfiguration.isSetUdpStatWorkerMonitor()) {
            return deprecatedConfiguration.isUdpStatWorkerMonitor();
        }
        return defaultValue;
    }
    @Override
    public boolean isTcpEnable() {
        return isTcpEnable;
    }
    @Override
    public String getTcpBindIp() {
        return tcpBindIp;
    }
    @Override
    public int getTcpBindPort() {
        return tcpBindPort;
    }
    @Override
    public boolean isUdpEnable() {
        return isUdpEnable;
    }
    @Override
    public String getUdpBindIp() {
        return udpBindIp;
    }
    @Override
    public int getUdpBindPort() {
        return udpBindPort;
    }
    @Override
    public int getUdpReceiveBufferSize() {
        return udpReceiveBufferSize;
    }
    @Override
    public int getWorkerThreadSize() {
        return workerThreadSize;
    }
    @Override
    public int getWorkerQueueSize() {
        return workerQueueSize;
    }
    @Override
    public boolean isWorkerMonitorEnable() {
        return workerMonitorEnable;
    }
    @Override
    public boolean isGrpcEnable() {
        return isGrpcEnable;
    }
    @Override
    public String getGrpcBindIp() {
        return grpcBindIp;
    }
    @Override
    public int getGrpcBindPort() {
        return grpcBindPort;
    }
    public int getGrpcServerExecutorThreadSize() {
        return grpcServerExecutorThreadSize;
    }
    public int getGrpcServerExecutorQueueSize() {
        return grpcServerExecutorQueueSize;
    }
    public boolean isGrpcServerExecutorMonitorEnable() {
        return grpcServerExecutorMonitorEnable;
    }
    public int getGrpcWorkerExecutorThreadSize() {
        return grpcWorkerExecutorThreadSize;
    }
    public int getGrpcWorkerExecutorQueueSize() {
        return grpcWorkerExecutorQueueSize;
    }
    public boolean isGrpcWorkerExecutorMonitorEnable() {
        return grpcWorkerExecutorMonitorEnable;
    }
    public int getGrpcStreamSchedulerThreadSize() {
        return grpcStreamSchedulerThreadSize;
    }
    public int getGrpcStreamCallInitRequestCount() {
        return grpcStreamCallInitRequestCount;
    }
    public int getGrpcStreamSchedulerPeriodMillis() {
        return grpcStreamSchedulerPeriodMillis;
    }
    public int getGrpcStreamSchedulerRecoveryMessageCount() {
        return grpcStreamSchedulerRecoveryMessageCount;
    }
    public ServerOption getGrpcServerOption() {
        return grpcServerOption;
    }
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("StatReceiverConfiguration{");
        sb.append("isTcpEnable=").append(isTcpEnable);
        sb.append(", tcpBindIp='").append(tcpBindIp).append('\'');
        sb.append(", tcpBindPort=").append(tcpBindPort);
        sb.append(", isUdpEnable=").append(isUdpEnable);
        sb.append(", udpBindIp='").append(udpBindIp).append('\'');
        sb.append(", udpBindPort=").append(udpBindPort);
        sb.append(", udpReceiveBufferSize=").append(udpReceiveBufferSize);
        sb.append(", workerThreadSize=").append(workerThreadSize);
        sb.append(", workerQueueSize=").append(workerQueueSize);
        sb.append(", workerMonitorEnable=").append(workerMonitorEnable);
        sb.append(", isGrpcEnable=").append(isGrpcEnable);
        sb.append(", grpcBindIp='").append(grpcBindIp).append('\'');
        sb.append(", grpcBindPort=").append(grpcBindPort);
        sb.append(", grpcServerExecutorThreadSize=").append(grpcServerExecutorThreadSize);
        sb.append(", grpcServerExecutorQueueSize=").append(grpcServerExecutorQueueSize);
        sb.append(", grpcServerExecutorMonitorEnable=").append(grpcServerExecutorMonitorEnable);
        sb.append(", grpcWorkerExecutorThreadSize=").append(grpcWorkerExecutorThreadSize);
        sb.append(", grpcWorkerExecutorQueueSize=").append(grpcWorkerExecutorQueueSize);
        sb.append(", grpcWorkerExecutorMonitorEnable=").append(grpcWorkerExecutorMonitorEnable);
        sb.append(", grpcStreamSchedulerThreadSize=").append(grpcStreamSchedulerThreadSize);
        sb.append(", grpcStreamCallInitRequestCount=").append(grpcStreamCallInitRequestCount);
        sb.append(", grpcStreamSchedulerPeriodMillis=").append(grpcStreamSchedulerPeriodMillis);
        sb.append(", grpcStreamSchedulerRecoveryMessageCount=").append(grpcStreamSchedulerRecoveryMessageCount);
        sb.append(", grpcServerOption=").append(grpcServerOption);
        sb.append('}');
        return sb.toString();
    }
}
| |
package example.anoncomm;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import peersim.config.Configuration;
import peersim.core.CommonState;
import peersim.core.Control;
import peersim.core.Network;
/**
* This {@link Control} initializes the general parameters of the Anonymous Communication protocol,
* and schedules the sending of {@link Onion} messages through the network.
*
* Assumption: a cycle lasts 1 second.
*
* @author Guido Lena Cota
*/
public class AnonCommControl implements Control {
    // PARAMETERS in the PEERSIM CONFIGURATION FILE
    // ------------------------------------------------------------------------------------------------
    /**
     * The protocol to operate on.
     */
    private static final String PAR_PROTOCOL = "protocol";
    /**
     * The fraction of nodes in the network that will act as a source node.
     */
    private static final String PAR_SOURCE_FRACTION = "source.fraction";
    /**
     * The number of times a new set of source node candidates is allowed to start sending onions.
     */
    private static final String PAR_SOURCE_ACTIVATIONS = "source.activations";
    /**
     * The number of {@link Onion}s sent by each source node.
     */
    private static final String PAR_ONIONS_LIST_LEN = "onionlist.length";
    /**
     * The number of nodes that compose a circuit.
     */
    private static final String PAR_CIRCUIT_LEN = "circuit.length";
    /**
     * The duration (in cycles) of a circuit, before it is updated.
     */
    private static final String PAR_CIRCUIT_DURATION = "circuit.duration";
    /**
     * The number of {@link Onion}s that can be sent at every cycle by a source node.
     */
    private static final String PAR_SEND_RATE = "send_rate";
    /**
     * The number of {@link Onion}s that can be relayed at every cycle by a relay node.
     */
    private static final String PAR_RELAY_RATE = "relay_rate";
    // FIELDS
    // ------------------------------------------------------------------------------------------------
    /**
     * The id of the protocol to operate on, as specified in {@link #PAR_PROTOCOL}.
     */
    private final int protocolId;
    /**
     * The number of nodes to activate as source nodes.
     */
    private final int nrSourceNodes;
    /**
     * The period (in cycles) between two source nodes activations.
     */
    private final int source_activation_period;
    /**
     * The number of {@link Onion}s sent by each source node.
     */
    private final short onions_list_len;
    /**
     * The number of nodes that compose a circuit.
     *
     * <p>NOTE(review): public static mutable state — written by the constructor and
     * read globally by the protocol; not thread-safe, relies on PeerSim's
     * single-threaded cycle model.
     */
    public static short circuit_len;
    /**
     * The duration (in cycles) of a circuit, before it is updated.
     */
    private final short circuit_duration;
    /**
     * The number of {@link Onion}s that can be relayed every cycle by a relay node.
     * NOTE(review): public static mutable state, see {@link #circuit_len}.
     */
    public static short relay_rate;
    /**
     * The number of {@link Onion}s that can be sent every cycle by a source node.
     * NOTE(review): public static mutable state, see {@link #circuit_len}.
     */
    public static short send_rate;
    /* Support variables to manage the selection of source nodes */
    /**
     * Support list (shuffled once in the constructor) for randomising the selection of source nodes.
     */
    private final List<Integer> sourceNodesCandidatesIDs;
    /**
     * Current number of source nodes activated.
     */
    private int activatedSourceNodes;
    /**
     * The number of nodes to activate as source nodes at each activation period.
     */
    private final int activationSize;
// INITIALIZATION
// ------------------------------------------------------------------------------------------------
/**
 * Reads the control's configuration, shuffles the candidate node ids and
 * derives the source-node activation schedule.
 *
 * @param prefix the configuration prefix of this control
 */
public AnonCommControl(String prefix)
{
protocolId = Configuration.getPid(prefix + "." + PAR_PROTOCOL);
double source_fraction = Configuration.getDouble(prefix + "." + PAR_SOURCE_FRACTION, 0.5);
int source_activations = Configuration.getInt(prefix + "." + PAR_SOURCE_ACTIVATIONS, 100);
onions_list_len = (short) Configuration.getInt(prefix + "." + PAR_ONIONS_LIST_LEN, 50);
circuit_len = (short) Configuration.getInt(prefix + "." + PAR_CIRCUIT_LEN, 5);
circuit_duration = (short) Configuration.getInt(prefix + "." + PAR_CIRCUIT_DURATION, 5);
relay_rate = (short) Configuration.getInt(prefix + "." + PAR_RELAY_RATE, 5);
send_rate = (short) Configuration.getInt(prefix + "." + PAR_SEND_RATE, 1);
// initialize the support variable used for randomizing the selection of source nodes
sourceNodesCandidatesIDs = new ArrayList<Integer>();
int nwSize = Network.size();
for(int i = 0; i < nwSize; i++)
sourceNodesCandidatesIDs.add(i);
// shuffle with the simulator's RNG so runs stay reproducible under a fixed seed
Collections.shuffle(sourceNodesCandidatesIDs,CommonState.r);
nrSourceNodes = (int) (nwSize * source_fraction);
activatedSourceNodes = 0;
// Period between activation waves: total time, minus the time the last wave
// needs to push its onions through a full circuit, split over the waves.
// NOTE(review): integer division — a zero/negative result would break the
// modulo in execute(); confirm configuration always keeps this > 0.
source_activation_period = (int) ((CommonState.getEndTime() - (circuit_len * (onions_list_len / send_rate))) / source_activations);
// NOTE(review): integer division — if nrSourceNodes < source_activations this
// batch size is 0 and no source node is ever activated; confirm intended.
activationSize = nrSourceNodes / source_activations;
}
// THE EXECUTE METHOD
// ------------------------------------------------------------------------------------------------
/**
 * Cycle-driven control: periodically activates the next batch of source nodes
 * and periodically refreshes the circuits of the already-active sources.
 *
 * @return always {@code false}, so this control never stops the simulation
 */
@Override
public boolean execute()
{
    long currentCycle = CommonState.getTime();
    // Time to activate the next wave of source nodes?
    if(currentCycle % source_activation_period == 0 && activatedSourceNodes < nrSourceNodes){
        // Clamp the batch end: the original code used
        // activatedSourceNodes + activationSize unconditionally, which could
        // activate more than nrSourceNodes when nrSourceNodes is not an exact
        // multiple of activationSize.
        int lastNodeToActivate = Math.min(activatedSourceNodes + activationSize, nrSourceNodes);
        for(int i = activatedSourceNodes; i < lastNodeToActivate; i++){
            AnonComm ac = (AnonComm) Network.get(sourceNodesCandidatesIDs.get(i)).getProtocol(protocolId);
            ac.setNrOnionsToSend(onions_list_len);
            ac.setCircuit(createNewCircuit(ac));
        }
        activatedSourceNodes = lastNodeToActivate;
    }
    // Time to refresh the circuits of all active source nodes?
    if(currentCycle % circuit_duration == 0){
        for(int i = 0; i < activatedSourceNodes; i++){
            AnonComm ac = (AnonComm) Network.get(sourceNodesCandidatesIDs.get(i)).getProtocol(protocolId);
            if(ac.isActiveSource())
                updateCircuit(ac);
        }
    }
    return false;
}
/**
 * Builds a fresh circuit of {@code circuit_len} distinct relay nodes for the
 * given source node. Each randomly drawn candidate is accepted with
 * probability {@code max(0.01, candidate.getProbabilityInteraction())}, so
 * every node keeps a minimal chance of being chosen.
 */
private List<AnonComm> createNewCircuit(AnonComm sourceNode){
    List<AnonComm> relays = new ArrayList<AnonComm>(circuit_len);
    while(relays.size() < circuit_len){
        int candidateId = sourceNodesCandidatesIDs.get(CommonState.r.nextInt(sourceNodesCandidatesIDs.size()));
        AnonComm candidate = (AnonComm) Network.get(candidateId).getProtocol(protocolId);
        // skip duplicates and the source node itself before the probability draw
        if(relays.contains(candidate) || sourceNode == candidate)
            continue;
        double acceptance = Math.max(0.01, candidate.getProbabilityInteraction());
        if(CommonState.r.nextDouble() < acceptance)
            relays.add(candidate);
    }
    return relays;
}
/**
 * Updates an existing circuit in place: the first {@code circuit_len - 1}
 * positions are overwritten with freshly drawn relay nodes, while the last
 * position is left untouched (presumably to keep the final hop stable —
 * TODO confirm this is intended and not an off-by-one).
 * Candidates equal to the source node, already present in the circuit, or
 * rejected by the interaction-probability draw (floored at 0.01) are skipped.
 * NOTE(review): assumes the circuit already holds circuit_len entries
 * (otherwise set(...) throws) and may spin for a long time when few eligible
 * candidates exist.
 */
private List<AnonComm> updateCircuit(AnonComm sourceNode){
List<AnonComm> circuit = sourceNode.getCircuit();
int assignedNodes = 0;
while(assignedNodes < circuit_len-1){
int randomIndex = sourceNodesCandidatesIDs.get(CommonState.r.nextInt(sourceNodesCandidatesIDs.size()));
AnonComm cn = (AnonComm) Network.get(randomIndex).getProtocol(protocolId);
if(!circuit.contains(cn) && sourceNode != cn && CommonState.r.nextDouble() < (Math.max(0.01, cn.getProbabilityInteraction()))){
circuit.set(assignedNodes, cn);
assignedNodes++;
}
}
return circuit;
}
}
| |
package io.jenkins.blueocean.rest.impl.pipeline;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import hudson.model.Item;
import hudson.model.Queue;
import io.jenkins.blueocean.commons.ServiceException.UnexpectedErrorException;
import io.jenkins.blueocean.rest.Navigable;
import io.jenkins.blueocean.rest.Reachable;
import io.jenkins.blueocean.rest.annotation.Capability;
import io.jenkins.blueocean.rest.factory.BluePipelineFactory;
import io.jenkins.blueocean.rest.hal.Link;
import io.jenkins.blueocean.rest.impl.pipeline.scm.ScmSourceImpl;
import io.jenkins.blueocean.rest.model.BlueActionProxy;
import io.jenkins.blueocean.rest.model.BlueFavorite;
import io.jenkins.blueocean.rest.model.BlueFavoriteAction;
import io.jenkins.blueocean.rest.model.BlueIcon;
import io.jenkins.blueocean.rest.model.BlueOrganization;
import io.jenkins.blueocean.rest.model.BlueOrganizationFolder;
import io.jenkins.blueocean.rest.model.BluePipelineContainer;
import io.jenkins.blueocean.rest.model.BluePipelineScm;
import io.jenkins.blueocean.rest.model.BlueQueueContainer;
import io.jenkins.blueocean.rest.model.BlueQueueItem;
import io.jenkins.blueocean.rest.model.BlueRun;
import io.jenkins.blueocean.rest.model.BlueRunContainer;
import io.jenkins.blueocean.rest.model.BlueScmSource;
import io.jenkins.blueocean.rest.model.Resource;
import io.jenkins.blueocean.service.embedded.rest.PipelineFolderImpl;
import io.jenkins.blueocean.service.embedded.rest.QueueItemImpl;
import jenkins.branch.OrganizationFolder;
import jenkins.model.Jenkins;
import jenkins.scm.api.metadata.AvatarMetadataAction;
import org.jenkinsci.plugins.workflow.multibranch.WorkflowMultiBranchProject;
import org.kohsuke.stapler.Stapler;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.kohsuke.stapler.export.Exported;
import org.kohsuke.stapler.json.JsonBody;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static io.jenkins.blueocean.rest.model.KnownCapabilities.BLUE_SCM;
import static io.jenkins.blueocean.rest.model.KnownCapabilities.JENKINS_ORGANIZATION_FOLDER;
/**
* BlueOcean abstraction of {@link OrganizationFolder}
*
* @author Vivek Pandey
*/
@Capability({JENKINS_ORGANIZATION_FOLDER, BLUE_SCM})
public abstract class OrganizationFolderPipelineImpl extends BlueOrganizationFolder {

    /** The wrapped Jenkins {@link OrganizationFolder}. */
    final OrganizationFolder folder;
    /** Delegate supplying the generic folder behaviour (names, actions, link). */
    private final PipelineFolderImpl pipelineFolder;
    /** The Blue Ocean organization owning this folder. */
    private final BlueOrganization organization;

    public OrganizationFolderPipelineImpl(BlueOrganization organization, OrganizationFolder folder, Link parent) {
        this.organization = organization;
        this.folder = folder;
        this.pipelineFolder = new PipelineFolderImpl(organization, folder, parent);
    }

    /** Returns the avatar icon of the folder, or {@code null} when no avatar metadata is attached. */
    @Override
    public BlueIcon getIcon() {
        final AvatarMetadataAction action = folder.getAction(AvatarMetadataAction.class);
        return action != null ? new OrganizationIcon(action, getLink()) : null;
    }

    /** Container of the multibranch pipelines nested under this organization folder. */
    @Navigable
    public BluePipelineContainer getPipelines(){
        return new MultiBranchPipelineContainerImpl(organization, folder, this);
    }

    @Override
    public Integer getNumberOfFolders() {
        return pipelineFolder.getNumberOfFolders();
    }

    @Override
    public Integer getNumberOfPipelines() {
        return pipelineFolder.getNumberOfPipelines();
    }

    @Override
    @Navigable
    public BlueRunContainer getRuns() {
        return new OrganizationFolderRunContainerImpl(this, this);
    }

    @Override
    public Collection<BlueActionProxy> getActions() {
        return pipelineFolder.getActions();
    }

    @Override
    public String getOrganizationName() {
        return organization.getName();
    }

    @Nonnull
    @Override
    public BlueOrganization getOrganization() {
        return organization;
    }

    @Override
    public String getName() {
        return pipelineFolder.getName();
    }

    @Override
    public String getDisplayName() {
        return pipelineFolder.getDisplayName();
    }

    @Override
    public String getFullName() {
        return pipelineFolder.getFullName();
    }

    @Override
    public String getFullDisplayName() {
        return pipelineFolder.getFullDisplayName();
    }

    /** The single pseudo-run representing the latest organization scan. */
    @Override
    @Exported(inline = true)
    public BlueRun getLatestRun() {
        return new OrganizationFolderRunContainerImpl(this, this).get(OrganizationFolderRunImpl.RUN_ID);
    }

    /**
     * Names of the multibranch projects inside this folder; non-multibranch
     * items are mapped to {@code null} entries by the transform.
     */
    @Override
    public Iterable<String> getPipelineFolderNames() {
        return Iterables.transform(folder.getItems(), new Function<Item, String>() {
            @Override
            public String apply(@Nullable Item input) {
                if(input instanceof WorkflowMultiBranchProject){
                    return input.getName();
                }
                return null;
            }
        });
    }

    @Override
    public Link getLink() {
        return pipelineFolder.getLink();
    }

    /**
     * Certain SCM provider organization folder implementation might support filtered repo search, if thats the case this method
     * must be overridden by their implementations.
     */
    @Override
    public boolean isScanAllRepos() {
        return true;
    }

    @Override
    public BlueScmSource getScmSource() {
        return new ScmSourceImpl(folder);
    }

    //lower than PipelineFolderImpl.PipelineFactoryImpl so that it gets looked up first
    public abstract static class OrganizationFolderFactory extends BluePipelineFactory {

        protected abstract OrganizationFolderPipelineImpl getFolder(jenkins.branch.OrganizationFolder folder, Reachable parent, BlueOrganization organization);

        @Override
        public OrganizationFolderPipelineImpl getPipeline(Item item, Reachable parent, BlueOrganization organization) {
            if (item instanceof jenkins.branch.OrganizationFolder) {
                return getFolder( (jenkins.branch.OrganizationFolder)item, parent, organization);
            }
            return null;
        }

        @Override
        public Resource resolve(Item context, Reachable parent, Item target, BlueOrganization organization) {
            OrganizationFolderPipelineImpl folder = getPipeline(context, parent, organization);
            if (folder!=null) {
                if(context == target){
                    return folder;
                }
                // descend one level towards the target and let the registered factories resolve it
                Item nextChild = findNextStep(folder.folder,target);
                for (BluePipelineFactory f : all()) {
                    Resource answer = f.resolve(nextChild, folder, target, organization);
                    if (answer!=null)
                        return answer;
                }
            }
            return null;
        }
    }

    /** Queue items scheduled for this folder (organization scans). */
    @Override
    public BlueQueueContainer getQueue() {
        return new BlueQueueContainer() {
            @Override
            public BlueQueueItem get(String name) {
                // NOTE(review): Jenkins.getInstance() may be null very early in startup — confirm callers run post-init
                for(Queue.Item item: Jenkins.getInstance().getQueue().getItems(folder)){
                    if(item.getId() == Long.parseLong(name)){
                        return new QueueItemImpl(organization, item, OrganizationFolderPipelineImpl.this, 1);
                    }
                }
                return null;
            }

            @Override
            public Link getLink() {
                return OrganizationFolderPipelineImpl.this.getLink().rel("queue");
            }

            @Override
            public Iterator<BlueQueueItem> iterator() {
                return new Iterator<BlueQueueItem>(){
                    Iterator<Queue.Item> it = Jenkins.getInstance().getQueue().getItems(folder).iterator();

                    @Override
                    public boolean hasNext() {
                        return it.hasNext();
                    }

                    @Override
                    public BlueQueueItem next() {
                        return new QueueItemImpl(organization, it.next(), OrganizationFolderPipelineImpl.this, 1);
                    }

                    @Override
                    public void remove() {
                        //noop
                    }
                };
            }
        };
    }

    @Override
    public List<Object> getParameters() {
        return null;
    }

    @Override
    public BlueFavorite favorite(@JsonBody BlueFavoriteAction favoriteAction) {
        return null;
    }

    @Override
    public Map<String, Boolean> getPermissions() {
        return null;
    }

    @Override
    public BluePipelineScm getScm() {
        return new ScmResourceImpl(folder, this);
    }

    protected OrganizationFolder getFolder() {
        return folder;
    }

    /** Redirecting icon resource backed by the folder's {@link AvatarMetadataAction}. */
    public static class OrganizationIcon extends BlueIcon {
        private final AvatarMetadataAction action;
        private final Link parent;

        public OrganizationIcon(AvatarMetadataAction action, Link parent) {
            this.action = action;
            this.parent = parent;
        }

        @Override
        public void getUrl() {
            StaplerRequest req = Stapler.getCurrentRequest();
            String s = req.getParameter("s");
            if (s == null) {
                s = Integer.toString(DEFAULT_ICON_SIZE);
            }
            StaplerResponse resp = Stapler.getCurrentResponse();
            try {
                // Cache-Control max-age is expressed in seconds. The previous code used
                // TimeUnit.DAYS.toDays(7), which evaluates to 7 — i.e. a 7-second cache
                // instead of the intended 7 days.
                resp.setHeader("Cache-Control", "max-age=" + TimeUnit.DAYS.toSeconds(7));
                resp.sendRedirect(action.getAvatarImageOf(s));
            } catch (IOException e) {
                throw new UnexpectedErrorException("Could not provide icon", e);
            }
        }

        @Override
        public Link getLink() {
            return parent.rel("icon");
        }
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* created at Feb 19, 2002
*
* @author Jeka
*/
package com.intellij.compiler.cache;
import com.intellij.compiler.classParsing.*;
import com.intellij.compiler.make.CacheCorruptedException;
import com.intellij.compiler.make.CacheUtils;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.cls.ClsUtil;
import consulo.java.module.util.JavaClassNames;
import consulo.logging.Logger;
import consulo.util.collection.primitive.ints.IntMaps;
import consulo.util.collection.primitive.ints.IntObjectMap;
import consulo.util.collection.primitive.ints.IntSet;
import consulo.util.collection.primitive.ints.IntSets;
import org.jetbrains.annotations.NonNls;
import java.util.*;
/**
 * Analyses how a single class changed between two compiler-cache snapshots
 * (the old cache and the cache built from freshly compiled classes) and marks
 * dependent classes for recompilation. Instances are single-use: the
 * constructor computes the deltas, {@link #run()} applies the marking rules.
 */
class JavaDependencyProcessor
{
private static final Logger LOG = Logger.getInstance("#com.intellij.compiler.make.JavaDependencyProcessor");
private final JavaDependencyCache myJavaDependencyCache;
// symbol-table id of the qualified name of the class under analysis
private final int myQName;
// old-cache members of this class indexed by dependency reference
private final Map<Dependency.MethodRef, MethodInfo> myRefToMethodMap = new HashMap<Dependency.MethodRef, MethodInfo>();
private final Map<Dependency.FieldRef, FieldInfo> myRefToFieldMap = new HashMap<Dependency.FieldRef, FieldInfo>();
// member-level delta between the old and new cache snapshots
private final Set<MemberInfo> myAddedMembers = new HashSet<MemberInfo>();
private final Set<MemberInfo> myRemovedMembers = new HashSet<MemberInfo>();
private final Set<MemberInfo> myChangedMembers = new HashSet<MemberInfo>();
private final Map<MemberInfo, ChangeDescription> myChangeDescriptions = new HashMap<MemberInfo, ChangeDescription>();
// presumably populated lazily by getBackDependencies() — initializer not visible here, TODO confirm
private Dependency[] myBackDependencies;
private final boolean myMembersChanged;
// superlist (extends/implements) change flags, computed in the constructor
private final boolean mySuperInterfaceAdded;
private final boolean mySuperInterfaceRemoved;
private final boolean mySuperClassChanged;
private final boolean mySuperlistGenericSignatureChanged;
private final boolean mySuperClassAdded;
private final Project myProject;
// annotation-specific change flags (only meaningful when myIsAnnotation is true)
private final boolean myIsAnnotation;
private final boolean myIsRemoteInterface;
private final boolean myWereAnnotationTargetsRemoved;
private final boolean myRetentionPolicyChanged;
private final boolean myAnnotationSemanticsChanged;
/**
 * Computes, for the class identified by {@code qName}, the delta between the
 * old compiler cache and the cache of freshly compiled classes: added,
 * removed and changed members plus superlist, annotation-target, retention
 * and annotation-semantics changes. The results drive {@link #run()}.
 *
 * @throws CacheCorruptedException if either cache cannot be read
 */
public JavaDependencyProcessor(Project project, JavaDependencyCache javaDependencyCache, int qName) throws CacheCorruptedException
{
myProject = project;
myJavaDependencyCache = javaDependencyCache;
myQName = qName;
final Cache cache = javaDependencyCache.getCache();
final Cache newClassesCache = javaDependencyCache.getNewClassesCache();
final MethodInfo[] oldMethods = cache.getMethods(qName);
// index the old methods by (name, descriptor) for quick lookup from back-dependency refs
for(MethodInfo method : oldMethods)
{
myRefToMethodMap.put(new Dependency.MethodRef(method.getName(), method.getDescriptor()), method);
}
final IntObjectMap<FieldInfo> oldFieldsMap = getFieldInfos(cache, qName);
oldFieldsMap.forEach((fieldName, fieldInfo) -> myRefToFieldMap.put(new Dependency.FieldRef(fieldName), fieldInfo));
final Map<String, MethodInfoContainer> oldMethodsMap = getMethodInfos(oldMethods);
final Map<String, MethodInfoContainer> newMethodsMap = getMethodInfos(newClassesCache.getMethods(qName));
final IntObjectMap<FieldInfo> newFieldsMap = getFieldInfos(newClassesCache, qName);
// classify the member-level differences between the two snapshots
addAddedMembers(oldFieldsMap, oldMethodsMap, newFieldsMap, newMethodsMap, myAddedMembers);
addRemovedMembers(oldFieldsMap, oldMethodsMap, newFieldsMap, newMethodsMap, myRemovedMembers);
addChangedMembers(oldFieldsMap, oldMethodsMap, newFieldsMap, newMethodsMap, myChangedMembers);
myMembersChanged = !myAddedMembers.isEmpty() || !myRemovedMembers.isEmpty() || !myChangedMembers.isEmpty();
// track changes in super list
myIsRemoteInterface = JavaMakeUtil.isInterface(cache.getFlags(myQName)) && cache.isRemote(qName);
myIsAnnotation = ClsUtil.isAnnotation(cache.getFlags(qName));
myWereAnnotationTargetsRemoved = myIsAnnotation && wereAnnotationTargesRemoved(cache, newClassesCache);
myRetentionPolicyChanged = myIsAnnotation && hasRetentionPolicyChanged(cache, newClassesCache);
myAnnotationSemanticsChanged = myIsAnnotation && hasAnnotationSemanticsChanged(cache, newClassesCache);
int[] oldInterfaces = cache.getSuperInterfaces(qName);
int[] newInterfaces = newClassesCache.getSuperInterfaces(qName);
mySuperInterfaceRemoved = wereInterfacesRemoved(oldInterfaces, newInterfaces);
// interfaces "added" in the new list are the ones "removed" when comparing new -> old
mySuperInterfaceAdded = wereInterfacesRemoved(newInterfaces, oldInterfaces);
mySuperlistGenericSignatureChanged = isSuperlistGenericSignatureChanged(cache.getGenericSignature(qName),
newClassesCache.getGenericSignature(qName));
boolean superclassesDiffer = cache.getSuperQualifiedName(qName) != newClassesCache.getSuperQualifiedName(qName);
boolean wasDerivedFromObject = JavaClassNames.JAVA_LANG_OBJECT.equals(javaDependencyCache.resolve(cache.getSuperQualifiedName(qName)));
mySuperClassChanged = !wasDerivedFromObject && superclassesDiffer;
mySuperClassAdded = wasDerivedFromObject && superclassesDiffer;
}
/**
 * Returns {@code true} if any of the given added members is a method without
 * an annotation default value ({@code EMPTY_CONSTANT_VALUE} marks "no
 * default"). Adding such a member to an annotation type forces all users of
 * the annotation to be recompiled.
 *
 * <p>Rewritten to iterate with the declared element type instead of the
 * original {@code Object} loop + cast, which defeated the generics.</p>
 */
private static boolean hasMembersWithoutDefaults(Set<MemberInfo> addedMembers)
{
    for(final MemberInfo memberInfo : addedMembers)
    {
        if(memberInfo instanceof MethodInfo)
        {
            final ConstantValue annotationDefault = ((MethodInfo) memberInfo).getAnnotationDefault();
            // EMPTY_CONSTANT_VALUE means the annotation member declares no default
            if(ConstantValue.EMPTY_CONSTANT_VALUE.equals(annotationDefault))
            {
                return true;
            }
        }
    }
    return false;
}
/**
 * Returns {@code true} if any changed method of this annotation type lost its
 * default value (as recorded by {@code MethodChangeDescription.removedAnnotationDefault}).
 *
 * <p>Rewritten to iterate {@code entrySet()} instead of the original
 * {@code keySet()} iteration followed by a redundant {@code get()} lookup.</p>
 */
private boolean wereAnnotationDefaultsRemoved()
{
    for(final Map.Entry<MemberInfo, ChangeDescription> entry : myChangeDescriptions.entrySet())
    {
        if(entry.getKey() instanceof MethodInfo)
        {
            // method entries are recorded with MethodChangeDescription values
            final MethodChangeDescription description = (MethodChangeDescription) entry.getValue();
            if(description.removedAnnotationDefault)
            {
                return true;
            }
        }
    }
    return false;
}
/**
 * Returns {@code true} when the generic signature of the class's superlist
 * (superclass + superinterfaces, with the formal type parameters stripped)
 * differs between the old and the new class version. A signature id of -1
 * means "no generic signature", so one side missing a signature while the
 * other has one always counts as a change.
 */
private boolean isSuperlistGenericSignatureChanged(int oldGenericSignature, int newGenericSignature) throws CacheCorruptedException
{
    if(oldGenericSignature == newGenericSignature)
    {
        return false;
    }
    // exactly one side lacks a signature -> changed
    if(oldGenericSignature == -1 || newGenericSignature == -1)
    {
        return true;
    }
    final SymbolTable symbolTable = myJavaDependencyCache.getSymbolTable();
    final String strippedOld = cutFormalParams(symbolTable.getSymbol(oldGenericSignature));
    final String strippedNew = cutFormalParams(symbolTable.getSymbol(newGenericSignature));
    return !strippedOld.equals(strippedNew);
}
/**
 * Strips the leading formal-type-parameter section ({@code <...>}) from a
 * generic class signature, returning only the superclass/superinterface part.
 *
 * <p>Fixes over the original: (1) an empty signature no longer throws
 * {@code StringIndexOutOfBoundsException} from {@code charAt(0)}; (2) the
 * closing {@code '>'} is found by bracket matching rather than
 * {@code indexOf('>')}, which mis-cut signatures whose type-parameter bounds
 * themselves contain generics, e.g. {@code <E:Ljava/lang/Enum<TE;>;>...}.</p>
 *
 * @param genericClassSignature a JVM ClassSignature string (JVMS 4.7.9.1)
 * @return the signature without its formal type parameters; the input itself
 *         when it has none or is malformed/unbalanced
 */
static String cutFormalParams(String genericClassSignature)
{
    // guard: charAt(0) on an empty string would throw
    if(genericClassSignature.isEmpty())
    {
        return genericClassSignature;
    }
    if(genericClassSignature.charAt(0) == '<')
    {
        int depth = 0;
        for(int i = 0; i < genericClassSignature.length(); i++)
        {
            final char c = genericClassSignature.charAt(i);
            if(c == '<')
            {
                depth++;
            }
            else if(c == '>' && --depth == 0)
            {
                // first position where the opening '<' is balanced
                return genericClassSignature.substring(i + 1);
            }
        }
        // unbalanced signature: keep the original lenient behaviour of returning the input
        return genericClassSignature;
    }
    return genericClassSignature;
}
/**
 * Applies the marking rules: using the deltas computed in the constructor,
 * marks every class in the dependency caches that may be affected by the
 * changes in this class, so it gets recompiled. Annotation types, superlist
 * changes, class-kind changes (class vs interface), access/finality changes
 * and member-level changes are each handled by a dedicated rule below; most
 * rules short-circuit with a {@code return} once they have marked everything
 * they imply.
 *
 * @throws CacheCorruptedException if the caches cannot be read or updated
 */
public void run() throws CacheCorruptedException
{
if(LOG.isDebugEnabled())
{
LOG.debug("Checking dependencies for " + myJavaDependencyCache.resolve(myQName));
}
final boolean superListChanged = mySuperClassChanged || mySuperClassAdded || mySuperInterfaceAdded || mySuperInterfaceRemoved || mySuperlistGenericSignatureChanged;
final Cache oldCache = myJavaDependencyCache.getCache();
final Cache newCache = myJavaDependencyCache.getNewClassesCache();
// fast path: nothing relevant changed, no dependent class needs recompilation
if(!myMembersChanged &&
oldCache.getFlags(myQName) == newCache.getFlags(myQName) &&
!superListChanged && !myWereAnnotationTargetsRemoved && !myRetentionPolicyChanged && !myAnnotationSemanticsChanged)
{
return; // nothing to do
}
// annotation types: any semantic change forces recompilation of all users
if(myIsAnnotation)
{
if(myAnnotationSemanticsChanged)
{
final IntSet visited = IntSets.newHashSet();
visited.add(myQName);
markAnnotationDependenciesRecursively(getBackDependencies(), LOG.isDebugEnabled() ? "; reason: semantics changed for " + myJavaDependencyCache.resolve(myQName) : "", visited);
return;
}
if(hasMembersWithoutDefaults(myAddedMembers))
{
markAll(getBackDependencies(), LOG.isDebugEnabled() ? "; reason: added annotation type member without default " + myJavaDependencyCache.resolve(myQName) : "");
return;
}
if(!myRemovedMembers.isEmpty())
{
markAll(getBackDependencies(), LOG.isDebugEnabled() ? "; reason: removed annotation type member " + myJavaDependencyCache.resolve(myQName) : "");
return;
}
if(!myChangedMembers.isEmpty())
{ // for annotations "changed" means return type changed
markAll(getBackDependencies(), LOG.isDebugEnabled() ? "; reason: changed annotation member's type " + myJavaDependencyCache.resolve(myQName) : "");
return;
}
if(wereAnnotationDefaultsRemoved())
{
markAll(getBackDependencies(), LOG.isDebugEnabled() ? "; reason: removed annotation member's default value " + myJavaDependencyCache.resolve(myQName) : "");
return;
}
if(myWereAnnotationTargetsRemoved)
{
markAll(getBackDependencies(), LOG.isDebugEnabled() ? "; reason: removed annotation's targets " + myJavaDependencyCache.resolve(myQName) : "");
return;
}
if(myRetentionPolicyChanged)
{
markAll(getBackDependencies(), LOG.isDebugEnabled() ? "; reason: retention policy changed for " + myJavaDependencyCache.resolve(myQName) : "");
return;
}
}
final JavaDependencyCacheNavigator cacheNavigator = myJavaDependencyCache.getCacheNavigator();
if(mySuperClassChanged || mySuperInterfaceRemoved || mySuperlistGenericSignatureChanged)
{
// superclass changed == old removed and possibly new added
// if anything (class or interface) in the superlist was removed, should recompile all subclasses (both direct and indirect)
// and all back-dependencies of this class and its subclasses
markAll(getBackDependencies(), LOG.isDebugEnabled() ? "; reason: deleted items from the superlist or changed superlist generic signature of " + myJavaDependencyCache.resolve(myQName) :
"");
cacheNavigator.walkSubClasses(myQName, new ClassInfoProcessor()
{
public boolean process(int classQName) throws CacheCorruptedException
{
markAll(oldCache.getBackDependencies(classQName), LOG.isDebugEnabled() ? "; reason: deleted items from the superlist or changed superlist generic signature of " +
myJavaDependencyCache.resolve(myQName) : "");
return true;
}
});
return;
}
// class <-> interface conversion invalidates every dependent and every subclass dependent
final boolean isKindChanged =
(JavaMakeUtil.isInterface(oldCache.getFlags(myQName)) && !JavaMakeUtil.isInterface(newCache.getFlags(myQName))) ||
(!JavaMakeUtil.isInterface(oldCache.getFlags(myQName)) && JavaMakeUtil.isInterface(newCache.getFlags(myQName)));
if(isKindChanged)
{
markAll(getBackDependencies(), LOG.isDebugEnabled() ? "; reason: class kind changed (class/interface) " + myJavaDependencyCache.resolve(myQName) : "");
cacheNavigator.walkSubClasses(myQName, new ClassInfoProcessor()
{
public boolean process(int classQName) throws CacheCorruptedException
{
markAll(oldCache.getBackDependencies(classQName), LOG.isDebugEnabled() ? "; reason: class kind changed (class/interface) " + myJavaDependencyCache.resolve(myQName) : "");
return true;
}
});
return;
}
boolean becameFinal = !ClsUtil.isFinal(oldCache.getFlags(myQName)) && ClsUtil.isFinal(newCache.getFlags(myQName));
if(becameFinal)
{
markAll(getBackDependencies(), LOG.isDebugEnabled() ? "; reason: class became final: " + myJavaDependencyCache.resolve(myQName) : "");
}
else
{
// per-dependent checks: mark only those back-dependencies actually affected
boolean becameAbstract = !ClsUtil.isAbstract(oldCache.getFlags(myQName)) && ClsUtil.isAbstract(newCache.getFlags(myQName));
boolean accessRestricted = JavaMakeUtil.isMoreAccessible(oldCache.getFlags(myQName), newCache.getFlags(myQName));
// lazily computed below, only when a dependent gets this far
Set<MethodInfo> removedMethods = null;
Set<MethodInfo> addedMethods = null;
for(Dependency backDependency : getBackDependencies())
{
if(myJavaDependencyCache.isTargetClassInfoMarked(backDependency))
{
continue;
}
if(accessRestricted)
{
if(myJavaDependencyCache.markTargetClassInfo(backDependency))
{
if(LOG.isDebugEnabled())
{
LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(backDependency.getClassQualifiedName()) + "; reason: " +
myJavaDependencyCache.resolve(myQName) + " made less accessible");
}
}
continue;
}
if(becameAbstract)
{
if(processClassBecameAbstract(backDependency))
{
continue;
}
}
if(isDependentOnRemovedMembers(backDependency))
{
if(myJavaDependencyCache.markTargetClassInfo(backDependency))
{
if(LOG.isDebugEnabled())
{
LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(backDependency.getClassQualifiedName()) +
"; reason: the class uses removed members of " + myJavaDependencyCache.resolve(myQName));
}
}
continue;
}
if(isDependentOnChangedMembers(backDependency))
{
if(myJavaDependencyCache.markTargetClassInfo(backDependency))
{
if(LOG.isDebugEnabled())
{
LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(backDependency.getClassQualifiedName()) +
"; reason: the class uses changed members of " + myJavaDependencyCache.resolve(myQName));
}
}
continue;
}
// overload-resolution hazards: removing/adding equivalent methods can change which overload a caller binds to
final Collection<Dependency.MethodRef> usedMethods = backDependency.getMethodRefs();
if(removedMethods == null)
{
removedMethods = extractMethods(myRemovedMembers, true);
}
if(isDependentOnEquivalentMethods(usedMethods, removedMethods))
{
if(myJavaDependencyCache.markTargetClassInfo(backDependency))
{
if(LOG.isDebugEnabled())
{
LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(backDependency.getClassQualifiedName()) +
"; reason: some overloaded methods of " + myJavaDependencyCache.resolve(myQName) + " were removed");
}
}
continue;
}
if(addedMethods == null)
{
addedMethods = extractMethods(myAddedMembers, true);
}
if(isDependentOnEquivalentMethods(usedMethods, addedMethods))
{
if(myJavaDependencyCache.markTargetClassInfo(backDependency))
{
if(LOG.isDebugEnabled())
{
LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(backDependency.getClassQualifiedName()) +
"; reason: some overloaded methods of " + myJavaDependencyCache.resolve(myQName) + " were added");
}
}
}
}
}
// inheritance-sensitive checks across the class hierarchy
final Set<MethodInfo> methodsToCheck = new HashSet<MethodInfo>();
extractMethods(myRemovedMembers, methodsToCheck, false);
processInheritanceDependencies(methodsToCheck);
// NOTE(review): added members are appended only after processInheritanceDependencies ran on the removed ones — confirm ordering is intentional
extractMethods(myAddedMembers, methodsToCheck, false);
if(!JavaMakeUtil.isAnonymous(myJavaDependencyCache.resolve(myQName)))
{
// these checks make no sense for anonymous classes
final IntSet fieldNames = IntSets.newHashSet();
extractFieldNames(myAddedMembers, fieldNames);
int addedFieldsCount = fieldNames.size();
extractFieldNames(myRemovedMembers, fieldNames);
if(!fieldNames.isEmpty())
{
// added/removed fields can shadow (or stop shadowing) fields of superclasses
cacheNavigator.walkSuperClasses(myQName, new ClassInfoProcessor()
{
public boolean process(final int classQName) throws CacheCorruptedException
{
markUseDependenciesOnFields(classQName, fieldNames);
return true;
}
});
}
if(addedFieldsCount > 0 && JavaMakeUtil.isInterface(oldCache.getFlags(myQName)))
{
// interface constants are inherited: walk subclasses and their superclasses once each
final IntSet visitedClasses = IntSets.newHashSet();
visitedClasses.add(myQName);
cacheNavigator.walkSubClasses(myQName, new ClassInfoProcessor()
{
public boolean process(int subclassQName) throws CacheCorruptedException
{
markUseDependenciesOnFields(subclassQName, fieldNames);
visitedClasses.add(subclassQName);
cacheNavigator.walkSuperClasses(subclassQName, new ClassInfoProcessor()
{
public boolean process(int superclassQName) throws CacheCorruptedException
{
if(visitedClasses.contains(superclassQName))
{
return false;
}
markUseDependenciesOnFields(superclassQName, fieldNames);
visitedClasses.add(superclassQName);
return true;
}
});
return true;
}
});
}
if(!methodsToCheck.isEmpty())
{
// equivalent-method hazards propagate both up and down the hierarchy
cacheNavigator.walkSuperClasses(myQName, new ClassInfoProcessor()
{
public boolean process(int classQName) throws CacheCorruptedException
{
markUseDependenciesOnEquivalentMethods(classQName, methodsToCheck, myQName);
return true;
}
});
cacheNavigator.walkSubClasses(myQName, new ClassInfoProcessor()
{
public boolean process(int classQName) throws CacheCorruptedException
{
markUseDependenciesOnEquivalentMethods(classQName, methodsToCheck, myQName);
return true;
}
});
}
// check referencing members in subclasses
final IntSet addedOrRemovedFields = IntSets.newHashSet();
final IntSet addedOrRemovedMethods = IntSets.newHashSet();
for(Set<MemberInfo> infos : Arrays.asList(myAddedMembers, myRemovedMembers))
{
for(MemberInfo member : infos)
{
if(!member.isPrivate())
{
if(member instanceof FieldInfo)
{
addedOrRemovedFields.add(member.getName());
}
else if(member instanceof MethodInfo)
{
addedOrRemovedMethods.add(member.getName());
}
}
}
}
if(!addedOrRemovedFields.isEmpty() || !addedOrRemovedMethods.isEmpty())
{
cacheNavigator.walkSubClasses(myQName, new ClassInfoProcessor()
{
public boolean process(final int subclassQName) throws CacheCorruptedException
{
if(!myJavaDependencyCache.isClassInfoMarked(subclassQName))
{
if(referencesMembersWithNames(oldCache, subclassQName, addedOrRemovedFields, addedOrRemovedMethods))
{
final boolean marked = myJavaDependencyCache.markClass(subclassQName);
if(marked && LOG.isDebugEnabled())
{
LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(subclassQName) + "; Reason: members were added/removed in superclass with names, that may clash" +
" " +
"with the names of members of another classes that this class references");
}
}
}
return true;
}
});
}
}
}
/**
 * Returns {@code true} if class {@code qName} references, in any class it
 * uses, a field whose name is in {@code fieldNames} or a method whose name is
 * in {@code methodNames}. Used to detect name clashes introduced by members
 * added to or removed from a superclass.
 */
private static boolean referencesMembersWithNames(Cache cache, final int qName, IntSet fieldNames, IntSet methodNames) throws CacheCorruptedException
{
    for(final int referencedClass : cache.getReferencedClasses(qName))
    {
        for(Dependency dependency : cache.getBackDependencies(referencedClass))
        {
            // only the dependency records contributed by qName itself matter
            if(dependency.getClassQualifiedName() != qName)
            {
                continue;
            }
            for(Dependency.FieldRef fieldRef : dependency.getFieldRefs())
            {
                if(fieldNames.contains(fieldRef.name))
                {
                    return true;
                }
            }
            for(Dependency.MethodRef methodRef : dependency.getMethodRefs())
            {
                if(methodNames.contains(methodRef.name))
                {
                    return true;
                }
            }
        }
    }
    return false;
}
/**
 * Marks every back-dependency in {@code dependencies} and, whenever a marked
 * dependent is itself an annotation type, cascades into that annotation's own
 * back-dependencies. {@code visitedAnnotations} guards against cycles and
 * repeated work.
 *
 * @param dependencies       the back-dependencies to mark
 * @param reason             debug-log suffix explaining the marking ("" when debug logging is off)
 * @param visitedAnnotations ids of annotation classes already expanded
 */
private void markAnnotationDependenciesRecursively(final Dependency[] dependencies, final @NonNls String reason, final IntSet visitedAnnotations)
throws CacheCorruptedException
{
final Cache oldCache = myJavaDependencyCache.getCache();
for(Dependency dependency : dependencies)
{
if(myJavaDependencyCache.markTargetClassInfo(dependency))
{
if(LOG.isDebugEnabled())
{
LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(dependency.getClassQualifiedName()) + reason);
}
}
final int depQName = dependency.getClassQualifiedName();
// cascade through dependent annotation types, each expanded at most once
if(ClsUtil.isAnnotation(oldCache.getFlags(depQName)))
{
if(!visitedAnnotations.contains(depQName))
{
visitedAnnotations.add(depQName);
markAnnotationDependenciesRecursively(oldCache.getBackDependencies(depQName), LOG.isDebugEnabled() ? "; reason: cascade semantics change for " + myJavaDependencyCache.resolve
(depQName) : "", visitedAnnotations);
}
}
}
}
/**
 * Every annotation target flag, used by {@link #wereAnnotationTargesRemoved}
 * to check each target bit individually.
 */
private static final int[] ALL_TARGETS = {
AnnotationTargets.ANNOTATION_TYPE,
AnnotationTargets.CONSTRUCTOR,
AnnotationTargets.FIELD,
AnnotationTargets.LOCAL_VARIABLE,
AnnotationTargets.METHOD,
AnnotationTargets.PACKAGE,
AnnotationTargets.PARAMETER,
AnnotationTargets.TYPE
};
/**
 * Returns {@code true} if at least one annotation target present in the old
 * class version is missing from the new one (losing a target can invalidate
 * existing annotation usages). Note: the method name keeps its historical
 * spelling ("Targes") because the constructor calls it by that name.
 */
private boolean wereAnnotationTargesRemoved(final Cache oldCache, final Cache newCache) throws CacheCorruptedException
{
    final SymbolTable symbolTable = myJavaDependencyCache.getSymbolTable();
    final int oldAnnotationTargets = JavaMakeUtil.getAnnotationTargets(oldCache, myQName, symbolTable);
    final int newAnnotationTargets = JavaMakeUtil.getAnnotationTargets(newCache, myQName, symbolTable);
    if(oldAnnotationTargets == newAnnotationTargets)
    {
        return false;
    }
    for(final int target : ALL_TARGETS)
    {
        // set in the old mask but cleared in the new one -> target removed
        final boolean removed = (oldAnnotationTargets & target) != 0 && (newAnnotationTargets & target) == 0;
        if(removed)
        {
            return true;
        }
    }
    return false;
}
/**
 * Returns {@code true} when the annotation's retention policy was widened:
 * SOURCE -> CLASS/RUNTIME, or CLASS -> RUNTIME. In those cases all sources
 * using the annotation must be recompiled to propagate the change.
 */
private boolean hasRetentionPolicyChanged(final Cache oldCache, final Cache newCache) throws CacheCorruptedException
{
    // if retention policy changed from SOURCE to CLASS or RUNTIME, all sources should be recompiled to propagate changes
    final int before = JavaMakeUtil.getAnnotationRetentionPolicy(myQName, oldCache, myJavaDependencyCache.getSymbolTable());
    final int after = JavaMakeUtil.getAnnotationRetentionPolicy(myQName, newCache, myJavaDependencyCache.getSymbolTable());
    final boolean sourceToBinary = before == RetentionPolicies.SOURCE && (after == RetentionPolicies.CLASS || after == RetentionPolicies.RUNTIME);
    final boolean classToRuntime = before == RetentionPolicies.CLASS && after == RetentionPolicies.RUNTIME;
    return sourceToBinary || classToRuntime;
}
	/**
	 * Returns true if the set of annotations attached to this class, or any of their
	 * member values, changed between the old and new class versions.
	 * {@code @Retention} and {@code @Target} are excluded here because they are handled
	 * separately (see hasRetentionPolicyChanged / wereAnnotationTargesRemoved).
	 */
	private boolean hasAnnotationSemanticsChanged(final Cache oldCache, final Cache newCache) throws CacheCorruptedException
	{
		final IntObjectMap<AnnotationConstantValue> oldAnnotations = fetchAllAnnotations(oldCache);
		final IntObjectMap<AnnotationConstantValue> newAnnotations = fetchAllAnnotations(newCache);
		// filter certain known annotation which are processed separately
		final int retentionAnnotation = myJavaDependencyCache.getSymbolTable().getId("java.lang.annotation.Retention");
		final int targetAnnotation = myJavaDependencyCache.getSymbolTable().getId("java.lang.annotation.Target");
		oldAnnotations.remove(retentionAnnotation);
		oldAnnotations.remove(targetAnnotation);
		newAnnotations.remove(retentionAnnotation);
		newAnnotations.remove(targetAnnotation);
		if(oldAnnotations.size() != newAnnotations.size())
		{
			return true; // number of annotation has changed
		}
		// Same count: every old annotation must still be present with equal member values.
		for(int annotName : oldAnnotations.keys())
		{
			if(!newAnnotations.containsKey(annotName))
			{
				return true;
			}
			final AnnotationNameValuePair[] oldValues = oldAnnotations.get(annotName).getMemberValues();
			final AnnotationNameValuePair[] newValues = newAnnotations.get(annotName).getMemberValues();
			if(annotationValuesDiffer(oldValues, newValues))
			{
				return true;
			}
		}
		return false;
	}
private boolean annotationValuesDiffer(final AnnotationNameValuePair[] oldValues, final AnnotationNameValuePair[] newValues)
{
if(oldValues.length != newValues.length)
{
return true;
}
final IntObjectMap<ConstantValue> names = IntMaps.newIntObjectHashMap();
for(AnnotationNameValuePair value : oldValues)
{
names.put(value.getName(), value.getValue());
}
for(AnnotationNameValuePair value : newValues)
{
if(!names.containsKey(value.getName()))
{
return true;
}
if(!value.getValue().equals(names.get(value.getName())))
{
return true;
}
}
return false;
}
private IntObjectMap<AnnotationConstantValue> fetchAllAnnotations(final Cache cache) throws CacheCorruptedException
{
final int classId = myQName;
IntObjectMap<AnnotationConstantValue> oldAnnotations = IntMaps.newIntObjectHashMap();
for(AnnotationConstantValue annot : cache.getRuntimeVisibleAnnotations(classId))
{
oldAnnotations.put(annot.getAnnotationQName(), annot);
}
for(AnnotationConstantValue annot : cache.getRuntimeInvisibleAnnotations(classId))
{
oldAnnotations.put(annot.getAnnotationQName(), annot);
}
return oldAnnotations;
}
private void markAll(Dependency[] backDependencies, @NonNls String reason) throws CacheCorruptedException
{
for(Dependency backDependency : backDependencies)
{
if(myJavaDependencyCache.markTargetClassInfo(backDependency))
{
if(LOG.isDebugEnabled())
{
LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(backDependency.getClassQualifiedName()) + reason);
}
}
}
}
private static void extractFieldNames(Collection<MemberInfo> fromCollection, IntSet toCollection)
{
for(final Object aFromCollection : fromCollection)
{
MemberInfo memberInfo = (MemberInfo) aFromCollection;
if(memberInfo instanceof FieldInfo)
{
toCollection.add(memberInfo.getName());
}
}
}
private static Set<MethodInfo> extractMethods(Collection<MemberInfo> fromCollection, boolean includeConstructors)
{
final Set<MethodInfo> methods = new HashSet<MethodInfo>();
extractMethods(fromCollection, methods, includeConstructors);
return methods;
}
private static void extractMethods(Collection<MemberInfo> fromCollection, Collection<MethodInfo> toCollection, boolean includeConstructors)
{
for(final MemberInfo memberInfo : fromCollection)
{
if(memberInfo instanceof MethodInfo)
{
final MethodInfo methodInfo = (MethodInfo) memberInfo;
if(includeConstructors)
{
toCollection.add(methodInfo);
}
else
{
if(!methodInfo.isConstructor())
{
toCollection.add(methodInfo);
}
}
}
}
}
	/**
	 * Handles the case when this class became abstract: a dependent class that calls
	 * one of its constructors (i.e. instantiates it) must be recompiled.
	 *
	 * @return true if a constructor reference was found (the dependency gets marked)
	 */
	private boolean processClassBecameAbstract(Dependency dependency) throws CacheCorruptedException
	{
		for(Dependency.MethodRef ref : dependency.getMethodRefs())
		{
			final MethodInfo usedMethod = myRefToMethodMap.get(ref);
			if(usedMethod == null)
			{
				// Reference does not resolve to a known method of this class; skip.
				continue;
			}
			if(usedMethod.isConstructor())
			{
				if(myJavaDependencyCache.markTargetClassInfo(dependency))
				{
					if(LOG.isDebugEnabled())
					{
						LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(dependency.getClassQualifiedName()) + "; reason: " +
								myJavaDependencyCache.resolve(myQName) + " made abstract");
					}
				}
				return true;
			}
		}
		return false;
	}
private boolean isDependentOnRemovedMembers(Dependency dependency)
{
for(Dependency.MethodRef ref : dependency.getMethodRefs())
{
if(myRemovedMembers.contains(myRefToMethodMap.get(ref)))
{
return true;
}
}
for(Dependency.FieldRef ref : dependency.getFieldRefs())
{
if(myRemovedMembers.contains(myRefToFieldMap.get(ref)))
{
return true;
}
}
return false;
}
	/**
	 * Returns true if the dependency references a changed field, or a changed method
	 * whose change is visible at call sites (return type, generic signatures, throws
	 * list, static flag, or restricted access).
	 */
	private boolean isDependentOnChangedMembers(Dependency dependency)
	{
		for(Dependency.FieldRef ref : dependency.getFieldRefs())
		{
			final FieldInfo fieldInfo = myRefToFieldMap.get(ref);
			if(myChangedMembers.contains(fieldInfo))
			{
				return true;
			}
		}
		for(Dependency.MethodRef ref : dependency.getMethodRefs())
		{
			final MethodInfo methodInfo = myRefToMethodMap.get(ref);
			if(myChangedMembers.contains(methodInfo))
			{
				// Only changes that affect the caller's compiled code force recompilation.
				final MethodChangeDescription changeDescription = (MethodChangeDescription) myChangeDescriptions.get(methodInfo);
				if(changeDescription.returnTypeDescriptorChanged ||
						changeDescription.returnTypeGenericSignatureChanged ||
						changeDescription.paramsGenericSignatureChanged ||
						changeDescription.throwsListChanged ||
						changeDescription.staticPropertyChanged ||
						changeDescription.accessRestricted)
				{
					return true;
				}
			}
		}
		return false;
	}
private boolean isDependentOnEquivalentMethods(Collection<Dependency.MethodRef> checkedMembers, Set<MethodInfo> members) throws CacheCorruptedException
{
// check if 'members' contains method with the same name and the same numbers of parameters, but with different types
if(checkedMembers.isEmpty() || members.isEmpty())
{
return false; // optimization
}
for(Dependency.MethodRef checkedMethod : checkedMembers)
{
if(hasEquivalentMethod(members, checkedMethod))
{
return true;
}
}
return false;
}
	/**
	 * Marks every not-yet-marked back-dependency of {@code checkedInfoQName} that
	 * calls a method equivalent (same name/arity, different parameter types) to one
	 * of {@code methodsToCheck}: overload resolution at such call sites may now pick
	 * a more specific method, so the callers must be recompiled.
	 */
	private void markUseDependenciesOnEquivalentMethods(final int checkedInfoQName, Set<MethodInfo> methodsToCheck, int methodsClassName) throws CacheCorruptedException
	{
		final Dependency[] backDependencies = myJavaDependencyCache.getCache().getBackDependencies(checkedInfoQName);
		for(Dependency dependency : backDependencies)
		{
			if(myJavaDependencyCache.isTargetClassInfoMarked(dependency))
			{
				continue;
			}
			if(isDependentOnEquivalentMethods(dependency.getMethodRefs(), methodsToCheck))
			{
				if(myJavaDependencyCache.markTargetClassInfo(dependency))
				{
					if(LOG.isDebugEnabled())
					{
						LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(dependency.getClassQualifiedName()) +
								"; reason: more specific methods added to " + myJavaDependencyCache.resolve(methodsClassName));
					}
				}
				// The checked class itself is rescheduled for a cache update as well.
				myJavaDependencyCache.addClassToUpdate(checkedInfoQName);
			}
		}
	}
	/**
	 * Marks every not-yet-marked class that reads or writes one of the given fields of
	 * {@code classQName}; used when conflicting fields appear in the class hierarchy.
	 */
	private void markUseDependenciesOnFields(final int classQName, IntSet fieldNames) throws CacheCorruptedException
	{
		final Cache oldCache = myJavaDependencyCache.getCache();
		for(Dependency useDependency : oldCache.getBackDependencies(classQName))
		{
			if(!myJavaDependencyCache.isTargetClassInfoMarked(useDependency))
			{
				for(Dependency.FieldRef field : useDependency.getFieldRefs())
				{
					if(fieldNames.contains(field.name))
					{
						if(myJavaDependencyCache.markTargetClassInfo(useDependency))
						{
							if(LOG.isDebugEnabled())
							{
								LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(useDependency.getClassQualifiedName()) +
										"; reason: conflicting fields were added to the hierarchy of the class " + myJavaDependencyCache.resolve(classQName));
							}
						}
						myJavaDependencyCache.addClassToUpdate(classQName);
						break; // stop iterating fields
					}
				}
			}
		}
	}
	/**
	 * Walks all subclasses of this class and marks for recompilation those affected by
	 * inheritance-related changes: removed remote-interface methods, a changed
	 * super-list, the class becoming final, added/changed members that clash with or
	 * are overridden by subclass members, abstract methods left without an
	 * implementation, and removed overridable methods (possible @Override breakage).
	 *
	 * @param removedMethods methods removed from this class between the two versions
	 */
	private void processInheritanceDependencies(final Set<MethodInfo> removedMethods) throws CacheCorruptedException
	{
		final Cache oldCache = myJavaDependencyCache.getCache();
		final Cache newCache = myJavaDependencyCache.getNewClassesCache();
		final boolean becameFinal = !ClsUtil.isFinal(oldCache.getFlags(myQName)) && ClsUtil.isFinal(newCache.getFlags(myQName));
		final SymbolTable symbolTable = myJavaDependencyCache.getSymbolTable();
		final Set<MemberInfo> removedConcreteMethods = fetchNonAbstractMethods(myRemovedMembers);
		// Removed methods a subclass could have overridden: drop final/static/private
		// methods and constructors, which cannot be overridden.
		final Set<MethodInfo> removedOverridableMethods;
		if(!removedMethods.isEmpty())
		{
			removedOverridableMethods = new HashSet<MethodInfo>(removedMethods);
			for(Iterator<MethodInfo> it = removedOverridableMethods.iterator(); it.hasNext(); )
			{
				final MethodInfo method = it.next();
				if(method.isFinal() || method.isStatic() || method.isPrivate() || method.isConstructor())
				{
					it.remove();
				}
			}
		}
		else
		{
			removedOverridableMethods = Collections.emptySet();
		}
		myJavaDependencyCache.getCacheNavigator().walkSubClasses(myQName, new ClassInfoProcessor()
		{
			// Returning true continues the subclass walk; each subclass is evaluated
			// independently against all change categories below.
			public boolean process(final int subclassQName) throws CacheCorruptedException
			{
				if(myJavaDependencyCache.isClassInfoMarked(subclassQName))
				{
					return true;
				}
				if(!oldCache.containsClass(subclassQName))
				{
					return true;
				}
				// Methods removed from a remote interface affect all non-interface implementors.
				if(!removedMethods.isEmpty() && myIsRemoteInterface && !JavaMakeUtil.isInterface(oldCache.getFlags(subclassQName)))
				{
					if(myJavaDependencyCache.markClass(subclassQName))
					{
						if(LOG.isDebugEnabled())
						{
							LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(subclassQName) +
									"; reason: methods were removed from remote interface: " + myJavaDependencyCache.resolve(myQName));
						}
					}
					return true;
				}
				// Any change to the super-list invalidates all subclasses.
				if(mySuperClassAdded || mySuperInterfaceAdded)
				{
					if(myJavaDependencyCache.markClass(subclassQName))
					{
						if(LOG.isDebugEnabled())
						{
							LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(subclassQName) + "; reason: the superlist of " +
									myJavaDependencyCache.resolve(myQName) + " is changed");
						}
					}
					return true;
				}
				// if info became final, mark direct inheritors
				if(becameFinal)
				{
					if(myQName == oldCache.getSuperQualifiedName(subclassQName))
					{
						if(myJavaDependencyCache.markClass(subclassQName))
						{
							if(LOG.isDebugEnabled())
							{
								LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(subclassQName) + "; reason: the class " +
										myJavaDependencyCache.resolve(myQName) + " was made final");
							}
						}
						return true;
					}
				}
				// process added members
				for(final MemberInfo member : myAddedMembers)
				{
					if(member instanceof MethodInfo)
					{
						final MethodInfo method = (MethodInfo) member;
						if(method.isAbstract())
						{
							// all derived classes should be marked in case an abstract method was added
							if(myJavaDependencyCache.markClass(subclassQName))
							{
								if(LOG.isDebugEnabled())
								{
									LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(subclassQName) + "; reason: added abstract method to " +
											myJavaDependencyCache.resolve(myQName));
								}
							}
							return true;
						}
						if(!method.isPrivate())
						{
							// A subclass method with the same signature may now conflict with
							// the newly added base method in several ways checked below.
							final MethodInfo derivedMethod = oldCache.findMethodsBySignature(subclassQName, method.getDescriptor(symbolTable), symbolTable);
							if(derivedMethod != null)
							{
								if(!method.getReturnTypeDescriptor(symbolTable).equals(derivedMethod.getReturnTypeDescriptor(symbolTable)))
								{
									if(myJavaDependencyCache.markClass(subclassQName))
									{
										if(LOG.isDebugEnabled())
										{
											LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(subclassQName) + "; reason: return types of method " +
													method + " in base and derived classes are different");
										}
									}
									return true;
								}
								if(JavaMakeUtil.isMoreAccessible(method.getFlags(), derivedMethod.getFlags()))
								{
									if(myJavaDependencyCache.markClass(subclassQName))
									{
										if(LOG.isDebugEnabled())
										{
											LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(subclassQName) + "; reason: the method " + method +
													" in derived class is less accessible than in base class");
										}
									}
									return true;
								}
								if(!method.isStatic() && derivedMethod.isStatic())
								{
									if(myJavaDependencyCache.markClass(subclassQName))
									{
										if(LOG.isDebugEnabled())
										{
											LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(subclassQName) + "; reason: the method " + method +
													" in derived class is static, but added method in the base class is not");
										}
									}
									return true;
								}
								if(method.isFinal() && !derivedMethod.isFinal())
								{
									if(myJavaDependencyCache.markClass(subclassQName))
									{
										if(LOG.isDebugEnabled())
										{
											LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(subclassQName) + "; reason: the method " + method +
													" in base class is final, but in derived class is not");
										}
									}
									return true;
								}
								if(!CacheUtils.areArraysContentsEqual(method.getThrownExceptions(), derivedMethod.getThrownExceptions()))
								{
									if(myJavaDependencyCache.markClass(subclassQName))
									{
										if(LOG.isDebugEnabled())
										{
											LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(subclassQName) + "; reason: exception lists of " +
													method + " in base and derived classes are different");
										}
									}
									return true;
								}
							}
							if(hasGenericsNameClashes(method, oldCache, subclassQName))
							{
								if(myJavaDependencyCache.markClass(subclassQName))
								{
									if(LOG.isDebugEnabled())
									{
										LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(subclassQName) +
												"; reason: found method with the same name, different generic signature, but the same erasure as " + method);
									}
								}
								return true;
							}
						}
					}
					else if(member instanceof FieldInfo)
					{
						// A field added to the base class shadows an identically-named subclass field.
						if(oldCache.findFieldByName(subclassQName, member.getName()) != null)
						{
							if(myJavaDependencyCache.markClass(subclassQName))
							{
								if(LOG.isDebugEnabled())
								{
									LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(subclassQName) + "; reason: added field " + member +
											" to base class");
								}
							}
							return true;
						}
					}
				}
				// process changed members
				for(final MemberInfo changedMember : myChangedMembers)
				{
					if(changedMember instanceof MethodInfo)
					{
						final MethodInfo oldMethod = (MethodInfo) changedMember;
						MethodChangeDescription changeDescription = (MethodChangeDescription) myChangeDescriptions.get(oldMethod);
						if(changeDescription.becameAbstract)
						{
							if(!ClsUtil.isAbstract(oldCache.getFlags(subclassQName)))
							{ // if the subclass was not abstract
								if(myJavaDependencyCache.markClass(subclassQName))
								{
									if(LOG.isDebugEnabled())
									{
										LOG.debug(
												"Mark dependent class " + myJavaDependencyCache.resolve(subclassQName) + "; reason: changed base method " + oldMethod);
									}
								}
								return true;
							}
						}
						final String oldMethodDescriptor = oldMethod.getDescriptor(symbolTable);
						final MethodInfo derivedMethod = oldCache.findMethodsBySignature(subclassQName, oldMethodDescriptor, symbolTable);
						if(derivedMethod != null)
						{
							if(myJavaDependencyCache.markClass(subclassQName))
							{
								if(LOG.isDebugEnabled())
								{
									LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(subclassQName) + "; reason: changed base method " + oldMethod);
								}
							}
							return true;
						}
						// now check if the changed method is compatible with methods declared in implemented interfaces of subclasses
						myJavaDependencyCache.getCacheNavigator().walkSuperInterfaces(subclassQName, new ClassInfoProcessor()
						{
							boolean found = false;
							public boolean process(final int ifaceQName) throws CacheCorruptedException
							{
								if(found)
								{
									return false;
								}
								final MethodInfo implementee = oldCache.findMethodsBySignature(ifaceQName, oldMethodDescriptor, symbolTable);
								if(implementee != null)
								{
									found = true;
									if(myJavaDependencyCache.markClass(subclassQName))
									{
										if(LOG.isDebugEnabled())
										{
											LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(subclassQName) + "; reason: changed base method, implementing corresponding method " +
													"inherited from an interface" + oldMethod);
										}
									}
								}
								return !found;
							}
						});
						if(myJavaDependencyCache.isClassInfoMarked(subclassQName))
						{
							return true;
						}
					}
				}
				// A concrete subclass loses inherited implementations of abstract methods.
				if(!ClsUtil.isAbstract(oldCache.getFlags(subclassQName)))
				{
					if(hasUnimplementedAbstractMethods(subclassQName, new HashSet<MemberInfo>(removedConcreteMethods)))
					{
						if(myJavaDependencyCache.markClass(subclassQName))
						{
							if(LOG.isDebugEnabled())
							{
								LOG.debug("Mark dependent class " + myJavaDependencyCache.resolve(subclassQName) + "; reason: the class should be declared abstract because abstract method " +
										"implementation was removed from its superclass: " +
										myJavaDependencyCache.resolve(myQName));
							}
						}
						return true;
					}
				}
				// A subclass (not recompiled in this session) may still @Override a removed method.
				if(!removedOverridableMethods.isEmpty() && !myJavaDependencyCache.isClassInfoMarked(subclassQName) && !myJavaDependencyCache.getNewClassesCache().containsClass(subclassQName) /*not
				compiled in this session*/)
				{
					final Cache cache = myJavaDependencyCache.getCache();
					for(MethodInfo subclassMethod : cache.getMethods(subclassQName))
					{
						if(!subclassMethod.isConstructor())
						{
							for(MethodInfo removedMethod : removedOverridableMethods)
							{
								if(removedMethod.getName() == subclassMethod.getName() /*todo: check param signatures here for better accuracy*/)
								{
									// got it
									if(myJavaDependencyCache.markClass(subclassQName))
									{
										if(LOG.isDebugEnabled())
										{
											LOG.debug("Mark dependent subclass " + myJavaDependencyCache.resolve(subclassQName) + "; reason: the class has methods annotated with @Override and some" +
													" " +
													"methods were changed or removed in a base class" +
													myJavaDependencyCache.resolve(myQName));
										}
									}
									return true;
								}
							}
						}
					}
				}
				// end of subclass processor
				return true;
			}
		});
	}
private static boolean hasGenericsNameClashes(final MethodInfo baseMethod, final Cache oldCache, final int subclassQName) throws CacheCorruptedException
{
// it is illegal if 2 methods in a hierarchy have 1) same name 2) different signatures 3) same erasure
final List<MethodInfo> methods = oldCache.findMethodsByName(subclassQName, baseMethod.getName());
if(methods.size() > 0)
{
for(final MethodInfo methodInSubclass : methods)
{
if(ClsUtil.isBridge(methodInSubclass.getFlags()))
{
continue;
}
if(baseMethod.getDescriptor() == methodInSubclass.getDescriptor() && baseMethod.getGenericSignature() != methodInSubclass.getGenericSignature())
{
return true;
}
}
}
return false;
}
private static Set<MemberInfo> fetchNonAbstractMethods(Set<MemberInfo> membersToCheck)
{
final Set<MemberInfo> methodsToCheck = new HashSet<MemberInfo>();
for(final Object aMembersToCheck : membersToCheck)
{
final MemberInfo memberInfo = (MemberInfo) aMembersToCheck;
if(memberInfo instanceof MethodInfo)
{
final MethodInfo methodInfo = (MethodInfo) memberInfo;
if(!methodInfo.isAbstract() && !methodInfo.isConstructor())
{
methodsToCheck.add(memberInfo);
}
}
}
return methodsToCheck;
}
	/**
	 * Checks whether {@code superQName} or anything above it in the hierarchy declares
	 * an abstract method matching one of {@code methodsToCheck}. Classes absent from
	 * the cache are inspected via PSI instead; java.lang.Object is skipped.
	 * NOTE: {@code methodsToCheck} is pruned as implementations are found
	 * (see hasBaseAbstractMethods), so callers should pass a private copy.
	 */
	private boolean hasUnimplementedAbstractMethods(int superQName, final Set methodsToCheck) throws CacheCorruptedException
	{
		if(myJavaDependencyCache.getCache().containsClass(superQName))
		{
			return hasBaseAbstractMethods(superQName, methodsToCheck) ||
					hasBaseAbstractMethodsInHierarchy(superQName, methodsToCheck);
		}
		else
		{
			final String qName = myJavaDependencyCache.resolve(superQName);
			if(!JavaClassNames.JAVA_LANG_OBJECT.equals(qName))
			{
				// Fall back to a PSI-based lookup for classes outside the cache.
				if(hasBaseAbstractMethods2(qName, methodsToCheck))
				{
					return true;
				}
			}
		}
		return false;
	}
	/**
	 * Recursively searches the superclass and super-interfaces of
	 * {@code fromClassQName} for abstract methods matching {@code methodsToCheck}.
	 * The set shrinks as implementations are found, so it is re-checked for emptiness
	 * between the superclass and the interface walks.
	 */
	private boolean hasBaseAbstractMethodsInHierarchy(int fromClassQName, final Set methodsToCheck) throws CacheCorruptedException
	{
		if(fromClassQName == Cache.UNKNOWN || methodsToCheck.isEmpty())
		{
			return false;
		}
		final Cache cache = myJavaDependencyCache.getCache();
		int superName = cache.getSuperQualifiedName(fromClassQName);
		if(superName != Cache.UNKNOWN)
		{
			if(hasUnimplementedAbstractMethods(superName, methodsToCheck))
			{
				return true;
			}
		}
		// The superclass walk may have pruned every remaining candidate.
		if(methodsToCheck.isEmpty())
		{
			return false;
		}
		int[] superInterfaces = cache.getSuperInterfaces(fromClassQName);
		for(int superInterface : superInterfaces)
		{
			if(hasUnimplementedAbstractMethods(superInterface, methodsToCheck))
			{
				return true;
			}
		}
		return false;
	}
	/**
	 * Checks whether {@code qName} declares an abstract method with the same signature
	 * as any member of {@code methodsToCheck}. Side effect: methods for which a
	 * non-abstract declaration is found are removed from the set (they are implemented
	 * and need no further checking up the hierarchy).
	 */
	private boolean hasBaseAbstractMethods(int qName, Set methodsToCheck) throws CacheCorruptedException
	{
		final SymbolTable symbolTable = myJavaDependencyCache.getSymbolTable();
		final Cache oldCache = myJavaDependencyCache.getCache();
		final Cache newCache = myJavaDependencyCache.getNewClassesCache();
		final Cache cache = newCache.containsClass(qName) ? newCache : oldCache; // use recompiled version (if any) for searching methods
		for(Iterator it = methodsToCheck.iterator(); it.hasNext(); )
		{
			final MethodInfo methodInfo = (MethodInfo) it.next();
			final MethodInfo superMethod = cache.findMethodsBySignature(qName, methodInfo.getDescriptor(symbolTable), symbolTable);
			if(superMethod != null)
			{
				if(ClsUtil.isAbstract(superMethod.getFlags()))
				{
					return true;
				}
				// A concrete declaration satisfies this method; stop tracking it.
				it.remove();
			}
		}
		return false;
	}
	/**
	 * PSI-based variant of {@link #hasBaseAbstractMethods} for classes that are not in
	 * the cache: resolves {@code qName} through the project's PSI and looks for
	 * abstract super-declarations of the given methods. Runs inside a read action;
	 * a CacheCorruptedException thrown inside is tunneled out via the Computable's
	 * return value and rethrown. Same side effect as the cache-based variant:
	 * implemented methods are removed from {@code methodsToCheck}.
	 */
	private boolean hasBaseAbstractMethods2(final String qName, final Set methodsToCheck) throws CacheCorruptedException
	{
		final boolean[] found = {false};
		final CacheCorruptedException ex = ApplicationManager.getApplication().runReadAction(new Computable<CacheCorruptedException>()
		{
			public CacheCorruptedException compute()
			{
				try
				{
					final PsiManager psiManager = PsiManager.getInstance(myProject);
					final PsiClass aClass = JavaPsiFacade.getInstance(psiManager.getProject()).findClass(qName, GlobalSearchScope.allScope(myProject));
					if(aClass == null)
					{
						return null;
					}
					final PsiElementFactory factory = JavaPsiFacade.getInstance(psiManager.getProject()).getElementFactory();
					final PsiNameHelper nameHelper = PsiNameHelper.getInstance(myProject);
					for(Iterator it = methodsToCheck.iterator(); it.hasNext(); )
					{
						final MethodInfo methodInfo = (MethodInfo) it.next();
						if(!nameHelper.isIdentifier(myJavaDependencyCache.resolve(methodInfo.getName()), LanguageLevel.JDK_1_3))
						{ // fix for SCR 16068
							continue;
						}
						// language level 1.3 will prevent exceptions from PSI if there are methods named "assert"
						final PsiMethod methodPattern = factory.createMethodFromText(getMethodText(methodInfo), null, LanguageLevel.JDK_1_3);
						final PsiMethod superMethod = aClass.findMethodBySignature(methodPattern, true);
						if(superMethod != null)
						{
							if(superMethod.hasModifierProperty(PsiModifier.ABSTRACT))
							{
								found[0] = true;
								return null;
							}
							it.remove();
						}
					}
				}
				catch(IncorrectOperationException e)
				{
					LOG.error(e);
				}
				catch(CacheCorruptedException e)
				{
					// Cannot be thrown from compute(); hand it to the caller for rethrow.
					return e;
				}
				return null;
			}
		});
		if(ex != null)
		{
			throw ex;
		}
		return found[0];
	}
@SuppressWarnings({"HardCodedStringLiteral"})
private
@NonNls
String getMethodText(MethodInfo methodInfo) throws CacheCorruptedException
{
final SymbolTable symbolTable = myJavaDependencyCache.getSymbolTable();
StringBuilder text = new StringBuilder(16);
final String returnType = signatureToSourceTypeName(methodInfo.getReturnTypeDescriptor(symbolTable));
text.append(returnType);
text.append(" ");
text.append(myJavaDependencyCache.resolve(methodInfo.getName()));
text.append("(");
final String[] parameterSignatures = methodInfo.getParameterDescriptors(symbolTable);
for(int idx = 0; idx < parameterSignatures.length; idx++)
{
String parameterSignature = parameterSignatures[idx];
if(idx > 0)
{
text.append(",");
}
text.append(signatureToSourceTypeName(parameterSignature));
text.append(" arg");
text.append(idx);
}
text.append(")");
return text.toString();
}
private static boolean wereInterfacesRemoved(int[] oldInterfaces, int[] newInterfaces)
{
for(int oldInterface : oldInterfaces)
{
boolean found = false;
for(int newInterface : newInterfaces)
{
found = oldInterface == newInterface;
if(found)
{
break;
}
}
if(!found)
{
return true;
}
}
return false;
}
/**
* @return a map [fieldName->FieldInfo]
*/
private static IntObjectMap<FieldInfo> getFieldInfos(Cache cache, int qName) throws CacheCorruptedException
{
final IntObjectMap<FieldInfo> map = IntMaps.newIntObjectHashMap();
for(FieldInfo fieldInfo : cache.getFields(qName))
{
map.put(fieldInfo.getName(), fieldInfo);
}
return map;
}
/**
* @return a map [methodSignature->MethodInfo]
*/
private Map<String, MethodInfoContainer> getMethodInfos(final MethodInfo[] methods) throws CacheCorruptedException
{
final Map<String, MethodInfoContainer> map = new HashMap<String, MethodInfoContainer>();
final SymbolTable symbolTable = myJavaDependencyCache.getSymbolTable();
for(MethodInfo methodInfo : methods)
{
final String signature = methodInfo.getDescriptor(symbolTable);
final MethodInfoContainer currentValue = map.get(signature);
// covariant methods have the same signature, so there might be several MethodInfos for one key
if(currentValue == null)
{
map.put(signature, new MethodInfoContainer(methodInfo));
}
else
{
currentValue.add(methodInfo);
}
}
return map;
}
private static void addAddedMembers(IntObjectMap<FieldInfo> oldFields, Map<String, MethodInfoContainer> oldMethods,
IntObjectMap<FieldInfo> newFields, Map<String, MethodInfoContainer> newMethods,
Collection<MemberInfo> members)
{
newFields.forEach((fieldName, fieldInfo) ->
{
if(!oldFields.containsKey(fieldName))
{
members.add(fieldInfo);
}
});
for(final String signature : newMethods.keySet())
{
if(!oldMethods.containsKey(signature))
{
members.addAll(newMethods.get(signature).getMethods());
}
}
}
	/**
	 * Computes removed members by reusing {@link #addAddedMembers} with the old and new
	 * collections swapped: whatever is "added" when going from new to old was removed.
	 */
	private static void addRemovedMembers(IntObjectMap<FieldInfo> oldFields, Map<String, MethodInfoContainer> oldMethods,
										  IntObjectMap<FieldInfo> newFields, Map<String, MethodInfoContainer> newMethods,
										  Collection<MemberInfo> members)
	{
		addAddedMembers(newFields, newMethods, oldFields, oldMethods, members);
	}
	/**
	 * Detects members present in both versions whose declaration changed; the old
	 * infos are added to {@code members} and a change description is recorded in
	 * {@code myChangeDescriptions} for each. Methods are grouped per erased signature:
	 * covariant groups of equal size are first paired by exact name+descriptor, then
	 * every remaining unpaired old/new combination is compared.
	 */
	private void addChangedMembers(IntObjectMap<FieldInfo> oldFields, Map<String, MethodInfoContainer> oldMethods,
								   IntObjectMap<FieldInfo> newFields, Map<String, MethodInfoContainer> newMethods,
								   Collection<MemberInfo> members) throws CacheCorruptedException
	{
		oldFields.forEach((fieldName, oldInfo) ->
		{
			final FieldInfo newInfo = newFields.get(fieldName);
			if(newInfo != null)
			{
				final FieldChangeDescription changeDescription = new FieldChangeDescription(oldInfo, newInfo);
				if(changeDescription.isChanged())
				{
					members.add(oldInfo);
					myChangeDescriptions.put(oldInfo, changeDescription);
				}
			}
		});
		if(!oldMethods.isEmpty())
		{
			final SymbolTable symbolTable = myJavaDependencyCache.getSymbolTable();
			final Set<MethodInfo> processed = new HashSet<MethodInfo>();
			for(final String signature : oldMethods.keySet())
			{
				final MethodInfoContainer oldMethodsContainer = oldMethods.get(signature);
				final MethodInfoContainer newMethodsContainer = newMethods.get(signature);
				if(newMethodsContainer != null)
				{
					processed.clear();
					if(oldMethodsContainer.size() == newMethodsContainer.size())
					{
						// first, process all corresponding method infos
						for(MethodInfo oldInfo : oldMethodsContainer.getMethods())
						{
							MethodInfo _newInfo = null;
							for(MethodInfo newInfo : newMethodsContainer.getMethods())
							{
								if(oldInfo.isNameAndDescriptorEqual(newInfo))
								{
									_newInfo = newInfo;
									break;
								}
							}
							if(_newInfo != null)
							{
								processed.add(oldInfo);
								processed.add(_newInfo);
								final MethodChangeDescription changeDescription = new MethodChangeDescription(oldInfo, _newInfo, symbolTable);
								if(changeDescription.isChanged())
								{
									members.add(oldInfo);
									myChangeDescriptions.put(oldInfo, changeDescription);
								}
							}
						}
					}
					// processing the rest of infos, each pair
					for(MethodInfo oldInfo : oldMethodsContainer.getMethods())
					{
						if(processed.contains(oldInfo))
						{
							continue;
						}
						for(MethodInfo newInfo : newMethodsContainer.getMethods())
						{
							if(processed.contains(newInfo))
							{
								continue;
							}
							final MethodChangeDescription changeDescription = new MethodChangeDescription(oldInfo, newInfo, symbolTable);
							if(changeDescription.isChanged())
							{
								members.add(oldInfo);
								myChangeDescriptions.put(oldInfo, changeDescription);
							}
						}
					}
				}
			}
		}
	}
	/**
	 * Returns true if {@code members} contains a method with the same name and the
	 * same number of parameters as {@code modelMethod} but with at least one differing
	 * parameter type (an "equivalent" overload that can shift overload resolution).
	 */
	private boolean hasEquivalentMethod(Collection<MethodInfo> members, Dependency.MethodRef modelMethod) throws CacheCorruptedException
	{
		final String[] modelSignature = modelMethod.getParameterDescriptors(myJavaDependencyCache.getSymbolTable());
		for(final MethodInfo method : members)
		{
			// Name ids are compared directly (interned int ids).
			if(modelMethod.name != method.getName())
			{
				continue;
			}
			final String[] methodSignature = method.getParameterDescriptors(myJavaDependencyCache.getSymbolTable());
			if(modelSignature.length != methodSignature.length)
			{
				continue;
			}
			// Same name and arity: any parameter-type difference makes it "equivalent".
			for(int i = 0; i < methodSignature.length; i++)
			{
				if(!methodSignature[i].equals(modelSignature[i]))
				{
					if(LOG.isDebugEnabled())
					{
						LOG.debug("Equivalent: " + modelMethod.getDescriptor(myJavaDependencyCache.getSymbolTable()) + " <=> " +
								method.getDescriptor(myJavaDependencyCache.getSymbolTable()));
					}
					return true;
				}
			}
		}
		return false;
	}
private static
@NonNls
String signatureToSourceTypeName(String signature)
{
try
{
switch(signature.charAt(0))
{
case 'B':
return "byte";
case 'C':
return "char";
case 'D':
return "double";
case 'F':
return "float";
case 'I':
return "int";
case 'J':
return "long";
case 'L':
{ // Full class name
int index = signature.indexOf(';'); // Look for closing `;'
if(index < 0)
{
throw new RuntimeException("Invalid signature: " + signature);
}
return signature.substring(1, index).replace('/', '.');
}
case 'S':
return "short";
case 'Z':
return "boolean";
case '[':
{ // Array declaration
int n;
StringBuffer brackets;
String type;
brackets = new StringBuffer(); // Accumulate []'s
// Count opening brackets and look for optional size argument
for(n = 0; signature.charAt(n) == '['; n++)
{
brackets.append("[]");
}
// The rest of the string denotes a `<field_type>'
type = signatureToSourceTypeName(signature.substring(n));
return type + brackets.toString();
}
case 'V':
return "void";
default:
throw new RuntimeException("Invalid signature: `" +
signature + "'");
}
}
catch(StringIndexOutOfBoundsException e)
{ // Should never occur
throw new RuntimeException("Invalid signature: " + e + ":" + signature);
}
}
private Dependency[] getBackDependencies() throws CacheCorruptedException
{
if(myBackDependencies == null)
{
myBackDependencies = myJavaDependencyCache.getCache().getBackDependencies(myQName);
}
return myBackDependencies;
}
private static class MethodInfoContainer
{
private List<MethodInfo> myInfos = null;
protected MethodInfoContainer(MethodInfo info)
{
myInfos = Collections.singletonList(info);
}
public List<MethodInfo> getMethods()
{
return myInfos;
}
public int size()
{
return myInfos.size();
}
public void add(MethodInfo info)
{
if(myInfos.size() == 1)
{
myInfos = new ArrayList<MethodInfo>(myInfos);
}
myInfos.add(info);
}
}
}
| |
/* See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Esri Inc. licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.esri.gpt.framework.context;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.lucene.search.BooleanQuery;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import com.esri.gpt.catalog.arcims.ImsService;
import com.esri.gpt.catalog.context.CatalogConfiguration;
import com.esri.gpt.catalog.lucene.LuceneIndexObserver;
import com.esri.gpt.catalog.lucene.LuceneIndexObserverInfo;
import com.esri.gpt.catalog.lucene.ParserAdaptorInfo;
import com.esri.gpt.catalog.lucene.ParserAdaptorInfos;
import com.esri.gpt.catalog.search.MapViewerConfigs;
import com.esri.gpt.catalog.search.SearchConfig;
import com.esri.gpt.control.download.DownloadConfiguration;
import com.esri.gpt.control.download.ItemInfo;
import com.esri.gpt.control.georss.DcatField;
import com.esri.gpt.control.georss.DcatFields;
import com.esri.gpt.control.georss.DcatSchemas;
import com.esri.gpt.control.webharvest.engine.DataProcessorFactory;
import com.esri.gpt.control.webharvest.engine.HarvesterConfiguration;
import com.esri.gpt.control.webharvest.engine.LocalDataProcessorFactory;
import com.esri.gpt.control.webharvest.extensions.localfolder.LocalFolderDataProcessorFactory;
import com.esri.gpt.control.webharvest.protocol.ProtocolFactories;
import com.esri.gpt.control.webharvest.protocol.ProtocolFactory;
import com.esri.gpt.control.webharvest.protocol.ProtocolInitializer;
import com.esri.gpt.control.webharvest.validator.IValidatorFactory;
import com.esri.gpt.control.webharvest.validator.ValidatorFactory;
import com.esri.gpt.framework.collection.StringAttribute;
import com.esri.gpt.framework.collection.StringAttributeMap;
import com.esri.gpt.framework.http.HttpClientRequest;
import com.esri.gpt.framework.mail.MailConfiguration;
import com.esri.gpt.framework.scheduler.ThreadSchedulerConfiguration;
import com.esri.gpt.framework.security.codec.PC1_Encryptor;
import com.esri.gpt.framework.security.credentials.ProxyAuthenticator;
import com.esri.gpt.framework.security.credentials.UsernamePasswordCredentials;
import com.esri.gpt.framework.security.identity.IdentityConfiguration;
import com.esri.gpt.framework.security.identity.IdentitySupport;
import com.esri.gpt.framework.security.identity.SingleSignOnMechanism;
import com.esri.gpt.framework.security.identity.agp.PortalIdentityAdapter;
import com.esri.gpt.framework.security.identity.ldap.LdapConfiguration;
import com.esri.gpt.framework.security.identity.ldap.LdapConnectionProperties;
import com.esri.gpt.framework.security.identity.ldap.LdapGroupProperties;
import com.esri.gpt.framework.security.identity.ldap.LdapUserProperties;
import com.esri.gpt.framework.security.identity.open.OpenProvider;
import com.esri.gpt.framework.security.metadata.MetadataAccessPolicy;
import com.esri.gpt.framework.security.principal.Group;
import com.esri.gpt.framework.security.principal.Role;
import com.esri.gpt.framework.security.principal.Roles;
import com.esri.gpt.framework.security.principal.UserAttribute;
import com.esri.gpt.framework.security.principal.UserAttributeMap;
import com.esri.gpt.framework.sql.DatabaseReference;
import com.esri.gpt.framework.util.LogUtil;
import com.esri.gpt.framework.util.TimePeriod;
import com.esri.gpt.framework.util.Val;
import com.esri.gpt.framework.xml.DomUtil;
import com.esri.gpt.framework.xml.NodeListAdapter;
import com.esri.gpt.framework.xml.XmlIoUtil;
import org.apache.commons.lang3.StringUtils;
/**
* Application configuration loader.
* <p>
* Loads the primary configuration for an application based upon the XML content
* defined by the primary configuration source.
*
* @see ApplicationConfiguration
*/
public class ApplicationConfigurationLoader {
// Class-scoped logger used for configuration-echo tracing (see load()).
private static final Logger LOG = Logger.getLogger(ApplicationConfigurationLoader.class.getName());
/** Main XML configuration file location. */
private static final String MAIN_FILE = "gpt/config/gpt.xml";
/** Optional developer-override configuration; attempted before MAIN_FILE in load(). */
private static final String MAIN_FILE_DEV = "gpt/config/gpt_dev.xml";
/** Default constructor. */
public ApplicationConfigurationLoader() {
}
// properties ==================================================================
/**
 * Returns the logger used for configuration loading messages.
 *
 * @return the logger supplied by {@link LogUtil}
 */
private Logger getLogger() {
  Logger configLogger = LogUtil.getLogger();
  return configLogger;
}
// methods =====================================================================
/**
 * Starts the configuration loading process.
 * <p>
 * The developer override resource (gpt_dev.xml) is attempted first; if it is
 * unavailable the primary resource (gpt.xml) is loaded. Each configuration
 * section is then read from the DOM, forward proxy authentication is applied
 * if configured, and the resulting configuration is optionally echoed to the
 * log (controlled by the catalog.echoConfigOnStartup parameter).
 *
 * @param appConfig the primary application configuration to populate
 * @throws Exception if the configuration resource cannot be read or parsed
 */
public void load(ApplicationConfiguration appConfig) throws Exception {
  // load the dom: try the developer override first, then the primary file
  String sConfigFile = null;
  Document dom = null;
  try {
    sConfigFile = MAIN_FILE_DEV;
    dom = DomUtil.makeDomFromResourcePath(sConfigFile, false);
    getLogger().log(Level.FINE, "Loaded configuration file: {0}", sConfigFile);
  } catch (Throwable e) {
    // Dev config not found; fall back to the primary configuration below
  }
  if (dom == null) {
    sConfigFile = MAIN_FILE;
    getLogger().log(Level.FINE, "Loading configuration file: {0}", sConfigFile);
    dom = DomUtil.makeDomFromResourcePath(sConfigFile, false);
  }
  XPath xpath = XPathFactory.newInstance().newXPath();
  if (dom != null) {
    // trace the full configuration document when FINER is enabled
    LOG.finer(XmlIoUtil.domToString(dom));
  }
  try {
    Node root = (Node) xpath.evaluate("/gptConfig", dom, XPathConstants.NODE);
    appConfig.setVersion(xpath.evaluate("@version", root));

    // load configurations
    loadDatabase(appConfig, dom, root);
    loadIdentity(appConfig, dom, root);
    loadMail(appConfig, dom, root);
    loadInteractiveMap(appConfig, dom, root);
    loadCatalog(appConfig, dom, root);
    loadScheduler(appConfig, dom, root);
    loadDownloadData(appConfig, dom, root);
    loadHarvesterConfiguration(appConfig, dom, root);
    loadProtocolFactories(appConfig, dom, root);

    // forward proxy authentication
    Node ndProxyAuth = (Node) xpath.evaluate("forwardProxyAuth", root, XPathConstants.NODE);
    if (ndProxyAuth != null) {
      String sUser = xpath.evaluate("@username", ndProxyAuth);
      String sPwd = xpath.evaluate("@password", ndProxyAuth);
      boolean bEncrypted = Val.chkBool(xpath.evaluate("@encrypted", ndProxyAuth), false);
      boolean bSetSystemProperties = Val.chkBool(xpath.evaluate("@setSystemProperties", ndProxyAuth), true);
      if (bEncrypted) {
        try {
          String sDecrypted = PC1_Encryptor.decrypt(sPwd);
          sPwd = sDecrypted;
        } catch (Exception e) {
          this.getLogger().log(Level.SEVERE, "The forwardProxyAuth password failed to decrypt.", e);
        }
      }
      if ((sUser != null) && (sUser.length() > 0) && (sPwd != null) && (sPwd.length() > 0)) {
        ProxyAuthenticator.setDefault(sUser, sPwd);
        // set system properties (only when not already defined externally)
        if (bSetSystemProperties) {
          try {
            if (System.getProperty("http.proxyUser") == null) {
              System.setProperty("http.proxyUser", sUser);
              System.setProperty("http.proxyPassword", sPwd);
            }
            if (System.getProperty("https.proxyUser") == null) {
              System.setProperty("https.proxyUser", sUser);
              System.setProperty("https.proxyPassword", sPwd);
            }
          } catch (Exception e) {
            this.getLogger().log(Level.SEVERE, "Error setting system properties for forward proxy authentication.", e);
          }
        }
      }
    }
  } catch (XPathExpressionException e) {
    // A bad XPath indicates a programming error; log it through the
    // application logger (was: e.printStackTrace(System.err)) so it is
    // captured consistently with every other error path in this class.
    getLogger().log(Level.SEVERE, "Error loading application configuration.", e);
  }

  // optionally echo the loaded configuration (default: on)
  StringAttributeMap params = appConfig.getCatalogConfiguration().getParameters();
  String param = Val.chkStr(params.getValue("catalog.echoConfigOnStartup"));
  boolean bEchoConfig = !param.equalsIgnoreCase("false");
  if (bEchoConfig) {
    getLogger().info(appConfig.toString());
  }
}
/**
* Loads the catalog configuration.
*
* @param appConfig
* the primary application configuration
* @param dom
* the configuration document
* @param root
* the root node for the document
* @throws Exception
*/
private void loadCatalog(ApplicationConfiguration appConfig, Document dom,
Node root) throws Exception {
// Loads catalog, search, repository, map viewer and Lucene sections.
XPath xpath = XPathFactory.newInstance().newXPath();
// catalog configuration: table prefixes, ArcIMS publish/browse services
Node ndCat = (Node) xpath.evaluate("catalog", root, XPathConstants.NODE);
if (ndCat != null) {
CatalogConfiguration cfg = appConfig.getCatalogConfiguration();
cfg.getParameters().clear();
ImsService publish = cfg.getArcImsCatalog().getPublishService();
ImsService browse = cfg.getArcImsCatalog().getBrowseService();
cfg.setTablePrefix(Val.chkStr(xpath.evaluate("@gptTablePrefix", ndCat),"GPT_"));
cfg.setMvsTablePrefix(Val.chkStr(xpath.evaluate("@mvsTablePrefix", ndCat),"MVS_"));
publish.setServerUrl(xpath.evaluate("@metadataServerUrl", ndCat));
publish.setServiceName(Val.chkStr(xpath.evaluate("@metadataServerPublishService",ndCat), "GPT_Publish_Metadata"));
publish.setTimeoutMillisecs(Val.chkInt(xpath.evaluate("@metadataServerTimeoutMillisecs", ndCat), 0));
// browse service shares the publish service's URL and timeout
browse.setServerUrl(publish.getServerUrl());
browse.setServiceName(Val.chkStr(xpath.evaluate("@metadataServerBrowseService", ndCat),"GPT_Browse_Metadata"));
browse.setTimeoutMillisecs(publish.getTimeoutMillisecs());
// additional parameters (copied from child parameter elements)
populateParameters(cfg.getParameters(), ndCat);
//load dcat fields only when a mapping file is configured
if(cfg.getParameters().containsKey("dcat.mappings")){
loadDcatMappings(cfg.getDcatSchemas(),cfg.getParameters().get("dcat.mappings").getValue());
}
// parse http timeouts (period strings; defaults applied by parsePeriod)
String connectionTimeout = cfg.getParameters().getValue("httpClientRequest.connectionTimeout");
String responseTimeout = cfg.getParameters().getValue("httpClientRequest.responseTimeout");
// set http timeouts
cfg.setConnectionTimeMs((int)parsePeriod(connectionTimeout, HttpClientRequest.DEFAULT_CONNECTION_TIMEOUT).getValue());
cfg.setResponseTimeOutMs((int)parsePeriod(responseTimeout , HttpClientRequest.DEFAULT_RESPONSE_TIMEOUT).getValue());
}
// search configuration
Node ndSearch = (Node) xpath.evaluate("catalog/search", root,
XPathConstants.NODE);
SearchConfig sCfg = appConfig.getCatalogConfiguration().getSearchConfig();
sCfg.setSearchConfigNode(ndSearch);
if (ndSearch != null) {
sCfg.setResultsReviewsShown(
Val.chkStr(xpath.evaluate("@searchResultsReviewsShown", ndSearch)));
sCfg.setResultsPerPage(xpath.evaluate("@searchResultsPerPage", ndSearch));
sCfg.setMaxSavedSearches(xpath.evaluate("@maxSavedSearches", ndSearch));
sCfg.setCswProfile(
Val.chkStr(xpath.evaluate("@cswServletUrlProfile", ndSearch),"urn:ogc:CSW:2.0.2:HTTP:OGCCORE:ESRI:GPT"));
sCfg.setSearchUri(xpath.evaluate("@cswServletUrl", ndSearch));
sCfg.setTimeOut(xpath.evaluate("@searchTimeoutMillisecs", ndSearch));
sCfg.setDistributedSearchMaxSelectedSites(
Val.chkStr(xpath.evaluate("@distributedSearchMaxSelectedSites",
ndSearch)));
sCfg.setDistributedSearchTimeoutMillisecs(
Val.chkStr(xpath.evaluate("@distributedSearchTimeoutMillisecs",
ndSearch)));
sCfg.setAllowExternalSearch(Val.chkBool(xpath.evaluate(
"@allowExternalSiteSearch", ndSearch), false));
sCfg.setAllowTemporalSearch(Val.chkBool(xpath.evaluate("@allowTemporalSearch",ndSearch),false));
sCfg.setJsfSuffix(Val.chkStr(xpath.evaluate(
"@jsfSuffix", ndSearch)));
sCfg.setGptToCswXsltPath(xpath.evaluate("@gpt2cswXslt", ndSearch));
sCfg.setMapViewerUrl(Val.chkStr(xpath.evaluate("@mapViewerUrl", ndSearch),""));
// validate() may throw if the search configuration is inconsistent
sCfg.validate();
}
// search repositories: build an ordered map of repository key -> attributes
NodeList nodes = (NodeList) xpath.evaluate(
"catalog/search/repositories/repository",
root, XPathConstants.NODESET);
// nodeList is a plain alias of nodes (kept for the loop below)
NodeList nodeList = nodes;
LinkedHashMap<String, Map<String, String>> sFactory =
new LinkedHashMap<String, Map<String, String>>();
// attribute maps are case-insensitive on the key
Map<String, String> attributes =
new TreeMap<String, String>(String.CASE_INSENSITIVE_ORDER);
/*attributes.put("key", "local");
attributes.put("class", "com.esri.gpt.catalog.search.SearchEngineLocal");
attributes.put("resourceKey", "catalog.search.searchSite.defaultsite");
attributes.put("labelResourceKey", "catalog.search.searchSite.defaultsite");
attributes.put("abstractResourceKey", "catalog.search.searchSite.defaultsite.abstract");
sFactory.put("local", attributes);*/
for (int i = 0; nodeList != null && i < nodeList.getLength(); i++) {
ndSearch = nodeList.item(i);
attributes =
new TreeMap<String, String>(String.CASE_INSENSITIVE_ORDER);
// copy every XML attribute of the repository element
NamedNodeMap nnm = ndSearch.getAttributes();
for (int j = 0; nnm != null && j < nnm.getLength(); j++) {
Node nd = nnm.item(j);
String key = Val.chkStr(nd.getNodeName());
String value = Val.chkStr(nd.getNodeValue());
attributes.put(key, value);
// NOTE(review): the three re-puts below appear redundant — the map is
// case-insensitive and the same value was just stored; kept as-is.
if (key.equalsIgnoreCase("RESOURCEKEY")) {
attributes.put("RESOURCEKEY", value);
}
if (key.equalsIgnoreCase("labelResourceKey")) {
attributes.put("labelResourceKey", value);
}
if (key.equalsIgnoreCase("abstractResourceKey")) {
attributes.put("abstractResourceKey", value);
}
}
// child parameter elements can add or override attributes
NodeList params = (NodeList) xpath.evaluate("parameter",
ndSearch, XPathConstants.NODESET);
for (int k = 0; params != null && k < params.getLength(); k++) {
String key = xpath.evaluate("@key", params.item(k));
String value = xpath.evaluate("@value", params.item(k));
attributes.put(Val.chkStr(key), Val.chkStr(value));
}
String key = Val.chkStr(xpath.evaluate("@key", ndSearch));
sFactory.put(key, attributes);
}
sCfg.setSearchFactoryRepos(sFactory);
// Mapviewer instances
ArrayList<MapViewerConfigs> mapViewerConfigs =
new ArrayList<MapViewerConfigs>();
nodes = (NodeList) xpath.evaluate("catalog/mapViewer/instance", root,
XPathConstants.NODESET);
for (int j = 0; nodes != null && j < nodes.getLength(); j++) {
MapViewerConfigs mvConfigs = new MapViewerConfigs();
Node nd = nodes.item(j);
mvConfigs.setClassName(Val.chkStr(xpath.evaluate("@className", nd),"com.esri.gpt.catalog.search.MapViewerFlex"));
mvConfigs.setUrl(xpath.evaluate("@url", nd));
NodeList pNodeList = (NodeList) xpath.evaluate("parameter", nd, XPathConstants.NODESET);
for (int k = 0; k < pNodeList.getLength() && pNodeList != null; k++) {
String key = xpath.evaluate("@key", pNodeList.item(k));
String value = xpath.evaluate("@value", pNodeList.item(k));
if (key != null || value != null) {
mvConfigs.addParameter(key, value);
}
}
mapViewerConfigs.add(mvConfigs);
}
sCfg.setMapViewerInstances(mapViewerConfigs);
// Lucene configuration
Node ndLucene = (Node) xpath.evaluate("catalog/lucene", root, XPathConstants.NODE);
if (ndLucene != null) {
CatalogConfiguration cfg = appConfig.getCatalogConfiguration();
cfg.getLuceneConfig().setIndexLocation(
xpath.evaluate("@indexLocation", ndLucene));
cfg.getLuceneConfig().setWriteLockTimeout(
Val.chkInt(xpath.evaluate("@writeLockTimeout", ndLucene), -1));
cfg.getLuceneConfig().setUseNativeFSLockFactory(
Val.chkStr(xpath.evaluate("@useNativeFSLockFactory", ndLucene)).equalsIgnoreCase("true"));
cfg.getLuceneConfig().setAnalyzerClassName(
xpath.evaluate("@analyzerClassName", ndLucene));
cfg.getLuceneConfig().setUseConstantScoreQuery(
Val.chkBool(xpath.evaluate("@useConstantScoreQuery", ndLucene), false));
cfg.getLuceneConfig().setMaxClauseCount(
Val.chkInt(xpath.evaluate("@maxClauseCount", ndLucene), BooleanQuery.getMaxClauseCount()));
// parser adaptors: each adaptor element names a class plus attributes
ParserAdaptorInfos infos = new ParserAdaptorInfos();
NodeList ndLstProxies = (NodeList) xpath.evaluate("adaptor", ndLucene,
XPathConstants.NODESET);
for (int i = 0; i < ndLstProxies.getLength(); i++) {
Node ndProxy = ndLstProxies.item(i);
String proxyName = xpath.evaluate("@name", ndProxy);
String proxyClassName = xpath.evaluate("@className", ndProxy);
ParserAdaptorInfo info = new ParserAdaptorInfo();
info.setName(proxyName);
info.setClassName(proxyClassName);
NodeList ndListProps = (NodeList) xpath.evaluate("attribute", ndProxy,
XPathConstants.NODESET);
for (int p = 0; p < ndListProps.getLength(); p++) {
Node ndProp = ndListProps.item(p);
String key = xpath.evaluate("@key", ndProp);
String value = xpath.evaluate("@value", ndProp);
info.getAttributes().set(key, value);
}
infos.add(info);
}
cfg.getLuceneConfig().setParserProxies(infos.createParserProxies());
// index observers: instantiated from className; null results are skipped
NodeList ndObservers = (NodeList) xpath.evaluate("observer", ndLucene, XPathConstants.NODESET);
for (Node ndObserver: new NodeListAdapter(ndObservers)) {
LuceneIndexObserverInfo info = new LuceneIndexObserverInfo();
info.setClassName(Val.chkStr(xpath.evaluate("@className", ndObserver)));
NodeList ndListProps = (NodeList) xpath.evaluate("attribute", ndObserver, XPathConstants.NODESET);
for (Node ndAttribute: new NodeListAdapter(ndListProps)) {
String key = xpath.evaluate("@key", ndAttribute);
String value = xpath.evaluate("@value", ndAttribute);
info.getAttributes().set(key, value);
}
LuceneIndexObserver observer = info.createObserver();
if (observer!=null) {
cfg.getLuceneConfig().getObservers().add(observer);
}
}
}
loadMetadataAccessPolicyConfiguration(appConfig, root);
// optionally eager-load the metadata schemas (default: off)
StringAttributeMap params = appConfig.getCatalogConfiguration().getParameters();
String param = Val.chkStr(params.getValue("catalog.loadSchemasOnStartup"));
boolean bLoadSchemas = param.equalsIgnoreCase("true");
if (bLoadSchemas) {
appConfig.getCatalogConfiguration().getConfiguredSchemas();
}
}
/**
* Load configured dcat mappings
* @param dcatSchemas the configured dcat schemas
* @param dcatMappings the path to dcat mapping file
* @throws Exception if the mapping file cannot be read or parsed
*           (e.g. IOException, SAXException, ParserConfigurationException)
*/
private void loadDcatMappings(DcatSchemas dcatSchemas, String dcatMappings) throws Exception {
// Parses the dcat mapping resource and registers one DcatFields set per schema.
getLogger().log(Level.FINE, "Loading dcat mapping file: {0}", dcatMappings);
Document dom = DomUtil.makeDomFromResourcePath(dcatMappings, false);
XPath xpath = XPathFactory.newInstance().newXPath();
Node dcat = (Node) xpath.evaluate("/dcat", dom, XPathConstants.NODE);
// each fields element groups the field definitions for one schema
NodeList fields = (NodeList) xpath.evaluate("fields", dcat, XPathConstants.NODESET);
if(fields != null){
for (int j = 0; j < fields.getLength(); j++) {
Node fld = fields.item(j);
DcatFields dcatFields = new DcatFields();
String schema = xpath.evaluate("@schema", fld);
NodeList flds = (NodeList) xpath.evaluate("field", fld, XPathConstants.NODESET);
for (int i = 0; i < flds.getLength(); i++) {
Node field = flds.item(i);
DcatField df = new DcatField();
String name = xpath.evaluate("@name", field);
df.setName(name);
df.setType(xpath.evaluate("@type", field));
// @index is a list of chains: chains separated by ';', items within a
// chain joined by '+'; empty chains/items are skipped
String strIndex = Val.chkStr(xpath.evaluate("@index", field));
List<List<String>> lstIndex = new ArrayList<List<String>>();
for (String strChain: strIndex.split(";")) {
strChain = Val.chkStr(strChain);
if (strChain.isEmpty()) continue;
List<String> lstChain = new ArrayList<String>();
for (String strItem: strChain.split("\\+")) {
strItem = Val.chkStr(strItem);
if (strItem.isEmpty()) continue;
lstChain.add(strItem);
}
lstIndex.add(lstChain);
}
df.setIndex(lstIndex);
df.setDateFormat(xpath.evaluate("@dateFormat", field));
// optional numeric/boolean attributes applied only when present
String max = Val.chkStr(xpath.evaluate("@maxChars", field));
String required = Val.chkStr(xpath.evaluate("@required", field));
if(required.length() > 0){
df.setRequired(Boolean.parseBoolean(required));
}
if(max.length() > 0){
// NOTE(review): Integer.parseInt will throw on a non-numeric @maxChars
df.setMaxChars(Integer.parseInt(max));
}
df.setDelimiter(xpath.evaluate("@delimiter", field));
// value mappings (from -> to)
NodeList fldMapList = (NodeList) xpath.evaluate("map", field, XPathConstants.NODESET);
for (Node fldMap: new NodeListAdapter(fldMapList)) {
String from = Val.chkStr((String)xpath.evaluate("@from",fldMap,XPathConstants.STRING));
String to = Val.chkStr((String)xpath.evaluate("@to",fldMap,XPathConstants.STRING));
df.addMapping(from, to);
}
// media entries (url -> mime type)
NodeList fldMediaList = (NodeList) xpath.evaluate("media", field, XPathConstants.NODESET);
for (Node fldMedia: new NodeListAdapter(fldMediaList)) {
String url = Val.chkStr((String)xpath.evaluate("@url",fldMedia,XPathConstants.STRING));
String mime = Val.chkStr((String)xpath.evaluate("@mime",fldMedia,XPathConstants.STRING));
df.addMedia(url, mime);
}
df.setJoinOperator((String)xpath.evaluate("@joinOperator",field,XPathConstants.STRING));
dcatFields.add(df);
}
dcatSchemas.put(schema, dcatFields);
}
}
}
/**
 * Loads the metadata access policy configuration.
 *
 * @param appConfig the primary application configuration
 * @param root the root node for the configuration document
 * @throws Exception if an XPath evaluation fails
 */
private void loadMetadataAccessPolicyConfiguration(
    ApplicationConfiguration appConfig, Node root) throws Exception {
  XPath path = XPathFactory.newInstance().newXPath();
  MetadataAccessPolicy policy = appConfig.getMetadataAccessPolicy();
  Node policyNode = (Node) path.evaluate("catalog/metadataAccessPolicy", root,
      XPathConstants.NODE);
  if (policyNode == null) {
    return;
  }
  policy.setAccessPolicyType(path.evaluate("@type", policyNode));
  String protectedGroupDN = path.evaluate("@protectedGroupDN", policyNode);
  boolean hasGroupDN =
      (protectedGroupDN != null) && (protectedGroupDN.trim().length() > 0);
  if (hasGroupDN) {
    policy.setAccessToGroupDN(protectedGroupDN);
  }
}
/**
 * Loads the database reference configuration.
 *
 * @param appConfig the primary application configuration
 * @param dom the configuration document
 * @param root the root node for the document
 * @throws XPathExpressionException indicates a programming error, bad XPath
 */
private void loadDatabase(ApplicationConfiguration appConfig, Document dom,
    Node root) throws XPathExpressionException {
  XPath path = XPathFactory.newInstance().newXPath();
  Node dbNode = (Node) path.evaluate("databaseReference", root,
      XPathConstants.NODE);
  if (dbNode == null) {
    return;
  }
  // build the reference from the element's attributes and register it
  // as the default database reference
  DatabaseReference reference = new DatabaseReference();
  reference.setReferenceName(path.evaluate("@name", dbNode));
  reference.setJndiName(path.evaluate("@jndiName", dbNode));
  reference.setDirectDriverClassName(path.evaluate("@driver", dbNode));
  reference.setDirectUrl(path.evaluate("@url", dbNode));
  reference.setDirectUsername(path.evaluate("@username", dbNode));
  reference.setDirectPassword(path.evaluate("@password", dbNode));
  reference.getTags().add("default");
  appConfig.getDatabaseReferences().add(reference, true);
}
/**
* Loads the identity configuration.
*
* @param appConfig
* the primary application configuration
* @param dom
* the configuration document
* @param root
* the root node for the document
* @throws XPathExpressionException
* indicates a programming error, bad XPath
*/
private void loadIdentity(ApplicationConfiguration appConfig, Document dom,
Node root) throws XPathExpressionException {
// Configures the identity adapter (simple, ArcGIS Portal or LDAP) plus
// roles, single sign-on, user/group properties and open providers.
// prepare
XPath xpath = XPathFactory.newInstance().newXPath();
IdentityConfiguration idConfig = appConfig.getIdentityConfiguration();
LdapConfiguration ldapConfig = idConfig.getLdapConfiguration();
Node ndIdentity = (Node) xpath.evaluate("identity", root, XPathConstants.NODE);
if (ndIdentity == null) {
return;
}
// primary parameters (with defaults applied when attributes are empty)
String sName = Val.chkStr(xpath.evaluate("@name", ndIdentity));
String sRealm = Val.chkStr(xpath.evaluate("@realm", ndIdentity));
String sEncKey = Val.chkStr(xpath.evaluate("@encKey", ndIdentity));
String sAdapterClass = Val.chkStr(xpath.evaluate("@adapterClassName", ndIdentity));
if (sName.length() == 0) {
sName = "Identity Configuration";
}
if (sRealm.length() == 0) {
sRealm = "Geoportal";
}
if (sAdapterClass.length() == 0) {
sAdapterClass = "com.esri.gpt.framework.security.identity.ldap.LdapIdentityAdapter";
}
idConfig.setName(sName);
idConfig.setRealm(sRealm);
idConfig.setEncKey(sEncKey);
// determine the adapter: simpleAdapter wins over arcgisPortalAdapter,
// which wins over ldapAdapter; losing nodes are nulled out below
Node ndSimple = (Node) xpath.evaluate("simpleAdapter", ndIdentity, XPathConstants.NODE);
Node ndPortal = (Node) xpath.evaluate("arcgisPortalAdapter", ndIdentity, XPathConstants.NODE);
Node ndLdap = (Node) xpath.evaluate("ldapAdapter", ndIdentity, XPathConstants.NODE);
if (ndSimple != null) {
ndLdap = null;
ndPortal = null;
sAdapterClass = "com.esri.gpt.framework.security.identity.local.SimpleIdentityAdapter";
} else if (ndPortal != null) {
ndLdap = null;
sAdapterClass = "com.esri.gpt.framework.security.identity.agp.PortalIdentityAdapter";
}
idConfig.setAdapterClassName(sAdapterClass);
// simple adapter configuration
if (ndSimple != null) {
// account: a single service account; the DN is synthesized from the username
Node ndAccount = (Node) xpath.evaluate("account", ndSimple, XPathConstants.NODE);
if (ndAccount != null) {
String sUser = xpath.evaluate("@username", ndAccount);
String sPwd = xpath.evaluate("@password", ndAccount);
String sDN = "cn=" + sUser + ",ou=simpleadapter";
boolean bEncrypted = Val.chkBool(xpath.evaluate("@encrypted", ndAccount),
false);
// an encrypted password is decrypted in place; on failure the
// encrypted value is kept and the error is logged
if (bEncrypted) {
try {
String sDecrypted = PC1_Encryptor.decrypt(sPwd);
sPwd = sDecrypted;
} catch (Exception e) {
this.getLogger().log(Level.SEVERE,
"The simple account password failed to decrypt.", e);
}
}
UsernamePasswordCredentials creds = new UsernamePasswordCredentials( sUser, sPwd);
creds.setDistinguishedName(sDN);
idConfig.setCatalogAdminDN(creds.getDistinguishedName());
idConfig.getSimpleConfiguration().setServiceAccountCredentials(creds);
}
// roles declared for the simple adapter
Node ndRoles = (Node) xpath.evaluate("roles", ndSimple, XPathConstants.NODE);
if (ndRoles != null) {
Roles roles = idConfig.getConfiguredRoles();
NodeList nlRoles = (NodeList) xpath.evaluate("role", ndRoles, XPathConstants.NODESET);
for (int i = 0; i < nlRoles.getLength(); i++) {
Node ndRole = nlRoles.item(i);
Role role = new Role();
role.setKey(xpath.evaluate("@key", ndRole));
roles.add(role);
}
// resolve role inheritance after all roles are registered
for (Role role : roles.values()) {
role.buildFullRoleSet(roles);
}
}
}
// ArcGIS Portal adapter configuration
if (ndPortal != null) {
String appId = Val.chkStr(xpath.evaluate("@appId",ndPortal));
String authorizeUrl = Val.chkStr(xpath.evaluate("@authorizeUrl",ndPortal));
String adminGroupId = Val.chkStr(xpath.evaluate("@gptAdministratorsGroupId",ndPortal));
String pubGroupId = Val.chkStr(xpath.evaluate("@gptPublishersGroupId",ndPortal));
// NOTE(review): these assignments mutate static fields of
// PortalIdentityAdapter — configuration is global, not per-instance
if (appId.length() > 0) PortalIdentityAdapter.AppId = appId;
if (authorizeUrl.length() > 0) PortalIdentityAdapter.AuthorizeUrl = authorizeUrl;
if (adminGroupId.length() > 0) PortalIdentityAdapter.GptAdministratorsGroupId = adminGroupId;
if (pubGroupId.length() > 0) PortalIdentityAdapter.GptPublishersGroupId = pubGroupId;
PortalIdentityAdapter.ExpirationMinutes = Val.chkInt(xpath.evaluate("@expirationMinutes",ndPortal),120);
PortalIdentityAdapter.AllUsersCanPublish = Val.chkBool(xpath.evaluate("@allUsersCanPublish",ndPortal),false);
// fixed three-tier role hierarchy: registered user -> publisher -> administrator
Roles roles = idConfig.getConfiguredRoles();
Role role = new Role("gptRegisteredUser");
role.setDistinguishedName(role.getKey());
role.setManage(false);
roles.add(role);
role = new Role("gptPublisher");
role.setInherits("gptRegisteredUser");
role.setDistinguishedName(role.getKey());
role.setManage(false);
roles.add(role);
role = new Role("gptAdministrator");
role.setInherits("gptPublisher");
role.setDistinguishedName(role.getKey());
role.setManage(false);
roles.add(role);
for (Role role2 : roles.values()) role2.buildFullRoleSet(roles);
}
// LDAP adapter configuration
if (ndLdap != null) {
// connection properties & service account
Node ndCon = (Node) xpath.evaluate("ldapConnectionProperties", ndLdap,
XPathConstants.NODE);
if (ndCon != null) {
LdapConnectionProperties props = ldapConfig.getConnectionProperties();
props.setProviderUrl(xpath.evaluate("@providerURL", ndCon));
props.setInitialContextFactoryName(xpath.evaluate(
"@initialContextFactoryName", ndCon));
props.setSecurityAuthenticationLevel(xpath.evaluate(
"@securityAuthentication", ndCon));
props.setSecurityProtocol(xpath.evaluate("@securityProtocol", ndCon));
Node ndService = (Node) xpath.evaluate("ldapServiceAccount", ndCon, XPathConstants.NODE);
if (ndService != null) {
String sUser = xpath.evaluate("@securityPrincipal", ndService);
String sPwd = xpath.evaluate("@securityCredentials", ndService);
boolean bEncrypted = Val.chkBool(xpath.evaluate("@encrypted", ndService), false);
// decrypt in place; keep the encrypted value on failure (logged)
if (bEncrypted) {
try {
String sDecrypted = PC1_Encryptor.decrypt(sPwd);
sPwd = sDecrypted;
} catch (Exception e) {
this.getLogger().log(Level.SEVERE, "The securityCredentials failed to decrypt.", e);
}
}
UsernamePasswordCredentials creds = new UsernamePasswordCredentials(sUser, sPwd);
props.setServiceAccountCredentials(creds);
idConfig.setCatalogAdminDN(xpath.evaluate("@catalogAdminDN", ndService));
}
}
// single sign-on mechanism
Node ndSSO = (Node) xpath.evaluate("singleSignOn", ndLdap,
XPathConstants.NODE);
if (ndSSO != null) {
SingleSignOnMechanism sso = idConfig.getSingleSignOnMechanism();
sso.setActive(Val.chkBool(xpath.evaluate("@active", ndSSO), false));
sso.setCredentialLocation(xpath.evaluate("@credentialLocation", ndSSO));
sso.setAnonymousValue(xpath.evaluate("@anonymousValue", ndSSO));
}
// self care support: which account-management functions are enabled
Node ndSupport = (Node) xpath.evaluate("selfCareSupport", ndLdap,
XPathConstants.NODE);
if (ndSupport != null) {
IdentitySupport support = idConfig.getSupportedFunctions();
support.setSupportsLogin(Val.chkBool(xpath.evaluate("@supportsLogin",
ndSupport), true));
support.setSupportsLogout(Val.chkBool(xpath.evaluate("@supportsLogout",
ndSupport), true));
support.setSupportsUserRegistration(Val.chkBool(xpath.evaluate(
"@supportsUserRegistration", ndSupport), false));
support.setSupportsUserProfileManagement(Val.chkBool(xpath.evaluate(
"@supportsUserProfileManagement", ndSupport), false));
support.setSupportsPasswordChange(Val.chkBool(xpath.evaluate(
"@supportsPasswordChange", ndSupport), false));
support.setSupportsPasswordRecovery(Val.chkBool(xpath.evaluate(
"@supportsPasswordRecovery", ndSupport), false));
}
// roles mapped from LDAP groups
Node ndRoles = (Node) xpath.evaluate("roles", ndLdap, XPathConstants.NODE);
if (ndRoles != null) {
Roles roles = idConfig.getConfiguredRoles();
String sRegUserKey = Val.chkStr(xpath.evaluate("@registeredUserRoleKey",
ndRoles));
if (sRegUserKey.length() == 0) {
sRegUserKey = "gptRegisteredUser";
}
roles.setAuthenticatedUserRequiresRole(Val.chkBool(xpath.evaluate(
"@authenticatedUserRequiresRole", ndRoles), true));
roles.setRegisteredUserRoleKey(sRegUserKey);
NodeList nlRoles = (NodeList) xpath.evaluate("role", ndRoles,
XPathConstants.NODESET);
for (int i = 0; i < nlRoles.getLength(); i++) {
Node ndRole = nlRoles.item(i);
Role role = new Role();
role.setKey(xpath.evaluate("@key", ndRole));
role.setInherits(xpath.evaluate("@inherits", ndRole));
role.setResKey(xpath.evaluate("@resKey", ndRole));
role.setManage(Val.chkBool(xpath.evaluate("@manage", ndRole),true));
role.setForbidden(Val.chkBool(xpath.evaluate("@forbidden", ndRole),false));
role.setDistinguishedName(xpath.evaluate("@groupDN", ndRole));
roles.add(role);
}
// resolve role inheritance after all roles are registered
for (Role role : roles.values()) {
role.buildFullRoleSet(roles);
}
}
// user properties
Node ndUser = (Node) xpath.evaluate("users", ndLdap, XPathConstants.NODE);
if (ndUser != null) {
LdapUserProperties props = ldapConfig.getUserProperties();
props.setUserDisplayNameAttribute(xpath.evaluate("@displayNameAttribute",
ndUser));
props.setPasswordEncryptionAlgorithm(xpath.evaluate(
"@passwordEncryptionAlgorithm", ndUser));
props.setUserDNPattern(xpath.evaluate("@newUserDNPattern", ndUser));
props.setUsernameSearchPattern(xpath.evaluate("@usernameSearchPattern",
ndUser));
props.setUserRequestsSearchPattern(xpath.evaluate("@newUserRequestSearchPattern",
ndUser));
props.setUserSearchDIT(xpath.evaluate("@searchDIT", ndUser));
NodeList nlObj = (NodeList) xpath.evaluate(
"requiredObjectClasses/objectClass/@name", ndUser,
XPathConstants.NODESET);
for (int i = 0; i < nlObj.getLength(); i++) {
props.addUserObjectClass(nlObj.item(i).getNodeValue());
}
}
// user profile parameters: attributes are only mapped for LDAP adapters
// and only when an ldapName is supplied
UserAttributeMap uaMap = idConfig.getUserAttributeMap();
NodeList nlUserAttr = (NodeList) xpath.evaluate(
"users/userAttributeMap/attribute", ndLdap, XPathConstants.NODESET);
for (int i = 0; i < nlUserAttr.getLength(); i++) {
UserAttribute attr = new UserAttribute();
attr.setKey(xpath.evaluate("@key", nlUserAttr.item(i)));
attr.setLdapName(xpath.evaluate("@ldapName", nlUserAttr.item(i)));
// TODO: need to do a better check to filter out badly defined
// parameters
boolean bIsLdap = (idConfig.getAdapterClassName().indexOf("Ldap") != -1);
if (bIsLdap && (attr.getLdapName().length() > 0)) {
uaMap.add(attr);
}
}
ldapConfig.getUserProperties().getUserProfileMapping().configureFromUserAttributes(uaMap);
// group properties
Node ndGroup = (Node) xpath.evaluate("groups", ndLdap, XPathConstants.NODE);
if (ndGroup != null) {
LdapGroupProperties props = ldapConfig.getGroupProperties();
props.setGroupDisplayNameAttribute(xpath.evaluate(
"@displayNameAttribute", ndGroup));
props.setGroupDynamicMemberAttribute(xpath.evaluate(
"@dynamicMemberOfGroupsAttribute", ndGroup));
props.setGroupDynamicMembersAttribute(xpath.evaluate(
"@dynamicMembersAttribute", ndGroup));
props.setGroupMemberAttribute(xpath.evaluate("@memberAttribute", ndGroup));
props.setGroupMemberSearchPattern(xpath.evaluate("@memberSearchPattern",
ndGroup));
props.setGroupNameSearchPattern(xpath.evaluate("@nameSearchPattern",
ndGroup));
props.setGroupSearchDIT(xpath.evaluate("@searchDIT", ndGroup));
}
// metadata management groups: keyed by their group DN
NodeList nlMmg = (NodeList) xpath.evaluate("groups/metadataManagementGroup",ndLdap,XPathConstants.NODESET);
for (int i = 0; i < nlMmg.getLength(); i++) {
Node ndMmg = nlMmg.item(i);
Group group = new Group();
group.setDistinguishedName(xpath.evaluate("@groupDN", ndMmg));
group.setKey(group.getDistinguishedName());
group.setName(xpath.evaluate("@name", ndMmg));
idConfig.getMetadataManagementGroups().add(group);
}
}
// open providers: a failure configuring one provider is logged and the
// remaining providers are still processed
NodeList nlOpenProviders = (NodeList)xpath.evaluate("openProviders/openProvider",ndIdentity,XPathConstants.NODESET);
for (int i=0; i<nlOpenProviders.getLength(); i++) {
try {
OpenProvider op = new OpenProvider();
op.processConfgurationNode(idConfig.getOpenProviders(),nlOpenProviders.item(i));
} catch (Exception e) {
this.getLogger().log(Level.SEVERE,"Error while configuring openProvider.",e);
}
}
}
/**
 * Loads the interactive map configuration.
 * <p>
 * All values are read as attributes of the single interactiveMap element.
 *
 * @param appConfig the primary application configuration
 * @param dom the configuration document
 * @param root the root node for the document
 * @throws XPathExpressionException indicates a programming error, bad XPath
 */
private void loadInteractiveMap(ApplicationConfiguration appConfig,
    Document dom, Node root) throws XPathExpressionException {
  XPath path = XPathFactory.newInstance().newXPath();
  appConfig.getInteractiveMap().setJsapiUrl(path.evaluate("interactiveMap/@jsapiUrl", root));
  appConfig.getInteractiveMap().setMapServiceUrl(path.evaluate("interactiveMap/@mapServiceUrl", root));
  appConfig.getInteractiveMap().setMapServiceType(path.evaluate("interactiveMap/@mapServiceType", root));
  appConfig.getInteractiveMap().setGeometryServiceUrl(path.evaluate("interactiveMap/@geometryServiceUrl", root));
  appConfig.getInteractiveMap().setLocatorUrl(path.evaluate("interactiveMap/@locatorUrl", root));
  appConfig.getInteractiveMap().setLocatorSingleFieldParameter(path.evaluate("interactiveMap/@locatorSingleFieldParameter", root));
  appConfig.getInteractiveMap().setMapVisibleLayers(path.evaluate("interactiveMap/@mapVisibleLayers", root));
  appConfig.getInteractiveMap().setMapInitialExtent(path.evaluate("interactiveMap/@mapInitialExtent", root));
}
/**
 * Loads the mail configuration.
 *
 * @param appConfig
 *          the primary application configuration
 * @param dom
 *          the configuration document
 * @param root
 *          the root node for the document
 * @throws XPathExpressionException
 *           indicates a programming error, bad XPath
 */
private void loadMail(ApplicationConfiguration appConfig, Document dom,
    Node root) throws XPathExpressionException {
  XPath xpath = XPathFactory.newInstance().newXPath();
  MailConfiguration mcfg = appConfig.getMailConfiguration();
  // host, port, then siteEmailAddress twice — presumably the "from" and "to"
  // addresses both default to the site address; confirm against
  // MailConfiguration.put before changing
  mcfg.put(
      xpath.evaluate("mail/@smtpHost", root),
      xpath.evaluate("mail/@smtpPort", root),
      xpath.evaluate("mail/@siteEmailAddress", root),
      xpath.evaluate("mail/@siteEmailAddress", root));
  mcfg.setEmailAddressRegexp(xpath.evaluate("mail/@emailAddressRegexp", root));
  // optional SMTP authentication element
  Node ndAuth = (Node) xpath.evaluate("mail/smtpAuth", root, XPathConstants.NODE);
  if (ndAuth != null) {
    String sUser = Val.chkStr(xpath.evaluate("@username", ndAuth));
    String sPwd = xpath.evaluate("@password", ndAuth);
    boolean bEncrypted = Val.chkBool(xpath.evaluate("@encrypted", ndAuth), false);
    if (bEncrypted) {
      try {
        String sDecrypted = PC1_Encryptor.decrypt(sPwd);
        sPwd = sDecrypted;
      } catch (Exception e) {
        // fixed message typo: was "smptAuth"; the element is <smtpAuth>
        this.getLogger().log(Level.SEVERE, "The smtpAuth password failed to decrypt.", e);
      }
    }
    // credentials are applied only when both the user and password are non-empty
    if ((sUser != null) && (sUser.length() > 0) && (sPwd != null) && (sPwd.length() > 0)) {
      mcfg.setPasswordAuthentication(new javax.mail.PasswordAuthentication(sUser, sPwd));
    }
  }
  // load custom JavaMail properties: <property key="..." value="..."/>
  NodeList ndProps = (NodeList) xpath.evaluate("mail/property", root, XPathConstants.NODESET);
  if (ndProps != null) {
    for (int i = 0; i < ndProps.getLength(); i++) {
      Node ndProp = ndProps.item(i);
      String sKey = StringUtils.trimToEmpty(xpath.evaluate("@key", ndProp));
      String sValue = StringUtils.trimToEmpty(xpath.evaluate("@value", ndProp));
      // both key and value must be present for the property to take effect
      if (!sKey.isEmpty() && !sValue.isEmpty()) {
        mcfg.setProperty(sKey, sValue);
      }
    }
  }
}
/**
 * Loads the scheduler configuration.
 *
 * @param appConfig
 *          the primary application configuration
 * @param dom
 *          the configuration document
 * @param root
 *          the root node for the document
 * @throws XPathExpressionException
 *           indicates a programming error, bad XPath
 */
private void loadScheduler(ApplicationConfiguration appConfig, Document dom,
    Node root) throws XPathExpressionException {
  XPath xpath = XPathFactory.newInstance().newXPath();
  ThreadSchedulerConfiguration schedulerConfig =
      appConfig.getThreadSchedulerConfiguration();
  Node schedulerNode = (Node) xpath.evaluate("scheduler", root,
      XPathConstants.NODE);
  // no <scheduler> element: leave the configuration untouched
  if (schedulerNode == null) {
    return;
  }
  // primary parameters
  schedulerConfig.setActive(
      Val.chkBool(xpath.evaluate("@active", schedulerNode), false));
  schedulerConfig.setCorePoolSize(
      Val.chkInt(xpath.evaluate("@corePoolSize", schedulerNode), 0));
  // one scheduled-thread definition per <thread> child
  NodeList threadNodes = (NodeList) xpath.evaluate("thread", schedulerNode,
      XPathConstants.NODESET);
  for (Node threadNode : new NodeListAdapter(threadNodes)) {
    String className = Val.chkStr(xpath.evaluate("@class", threadNode));
    String delay = Val.chkStr(xpath.evaluate("@delay", threadNode));
    String period = Val.chkStr(xpath.evaluate("@period", threadNode));
    String at = Val.chkStr(xpath.evaluate("@at", threadNode));
    // nested <parameter> elements become the thread's parameter map
    StringAttributeMap threadParams = new StringAttributeMap();
    populateParameters(threadParams, threadNode);
    schedulerConfig.addDefinition(className, delay, period, at, threadParams);
  }
}
/**
 * Loads the download data configuration.
 *
 * @param appConfig
 *          the primary application configuration
 * @param dom
 *          the configuration document
 * @param root
 *          the root node for the document
 * @throws XPathExpressionException
 *           indicates a programming error, bad XPath
 */
private void loadDownloadData(ApplicationConfiguration appConfig, Document dom,
    Node root) throws XPathExpressionException {
  XPath xpath = XPathFactory.newInstance().newXPath();
  Node ndDownload = (Node) xpath.evaluate("downloadData", root,
      XPathConstants.NODE);
  // absent <downloadData> element: nothing to configure
  if (ndDownload == null) {
    return;
  }
  DownloadConfiguration cfg = appConfig.getDownloadDataConfiguration();
  cfg.setTaskUrl(xpath.evaluate("@taskUrl", ndDownload));
  cfg.setMapServiceUrl(xpath.evaluate("@mapServiceUrl", ndDownload));
  cfg.setMapServiceType(xpath.evaluate("@mapServiceType", ndDownload));
  cfg.setMapInitialExtent(xpath.evaluate("@mapInitialExtent", ndDownload));
  // each list below is a sequence of elements carrying key/alias/resKey
  // attributes; a single helper replaces four copy-pasted loops
  readItemInfoList(xpath, ndDownload, "projections/projection",
      cfg.getProjectionInfo());
  readItemInfoList(xpath, ndDownload, "formats/format",
      cfg.getOutputFormatInfo());
  readItemInfoList(xpath, ndDownload, "features/feature",
      cfg.getFeatureFormatInfo());
  readItemInfoList(xpath, ndDownload, "rasters/raster",
      cfg.getRasterFormatInfo());
}

/**
 * Reads a list of key/alias/resKey items into a target collection.
 *
 * @param xpath the XPath evaluator
 * @param parent the parent node to evaluate against
 * @param expression the XPath selecting the item elements
 * @param target the collection receiving one ItemInfo per element
 * @throws XPathExpressionException
 *           indicates a programming error, bad XPath
 */
private void readItemInfoList(XPath xpath, Node parent, String expression,
    List<ItemInfo> target) throws XPathExpressionException {
  NodeList nodes = (NodeList) xpath.evaluate(expression, parent,
      XPathConstants.NODESET);
  for (int i = 0; i < nodes.getLength(); i++) {
    Node node = nodes.item(i);
    String key = xpath.evaluate("@key", node);
    String alias = xpath.evaluate("@alias", node);
    String resKey = xpath.evaluate("@resKey", node);
    target.add(new ItemInfo(key, alias, resKey));
  }
}
/**
 * Loads harvester configuration.
 * <p>All numeric/boolean settings are read from catalog parameters
 * (the "webharvester.*" keys) that were populated earlier; the DOM is only
 * consulted at the end for the optional &lt;webharvester&gt; element that
 * declares data processor factories.
 * @param appConfig application configuration
 * @param dom the configuration document
 * @param root the root node for the document
 * @throws XPathExpressionException indicates a programming error, bad XPath
 */
private void loadHarvesterConfiguration(ApplicationConfiguration appConfig, Document dom, Node root) throws XPathExpressionException {
  StringAttributeMap parameters = appConfig.getCatalogConfiguration().getParameters();
  HarvesterConfiguration cfg = appConfig.getHarvesterConfiguration();
  // raw parameter values; Val.chkStr yields "" when a parameter is absent
  String active = Val.chkStr(parameters.getValue("webharvester.active"));
  String suspended = Val.chkStr(parameters.getValue("webharvester.suspended"));
  String queueEnabled = Val.chkStr(parameters.getValue("webharvester.queueEnabled"));
  String poolsize = Val.chkStr(parameters.getValue("webharvester.poolSize"));
  String autoselectfrequency = Val.chkStr(parameters.getValue("webharvester.autoSelectFrequency"));
  String watchdogfrequency = Val.chkStr(parameters.getValue("webharvester.watchDogFrequency"));
  String basecontextpath = Val.chkStr(parameters.getValue("webharvester.baseContextPath"));
  String maxRepRecords = Val.chkStr(parameters.getValue("webharvester.maxRepRecords"));
  String maxRepErrors = Val.chkStr(parameters.getValue("webharvester.maxRepErrors"));
  String resourceAutoApprove = Val.chkStr(parameters.getValue("webharvester.resource.autoApprove"));
  Logger logger = getLogger();
  // "active" defaults to true and drives the queue default as well
  if (Val.chkBool(active, true)) {
    cfg.setActive(true);
    cfg.setQueueEnabled(true);
  } else {
    cfg.setActive(false);
    cfg.setQueueEnabled(false);
  }
  // "suspended" defaults to false
  if (Val.chkBool(suspended, false)) {
    cfg.setSuspended(true);
  } else {
    cfg.setSuspended(false);
  }
  // an explicit queueEnabled value overrides the default set above, but an
  // active harvester always keeps the queue enabled
  if (queueEnabled.length()>0) {
    cfg.setQueueEnabled(Val.chkBool(queueEnabled, cfg.getQueueEnabled()) || cfg.getActive());
  }
  // thread pool size: explicit value, falling back to DEFAULT_POOL_SIZE on
  // a missing or unparsable parameter
  if (poolsize.length() > 0) {
    try {
      int num = Integer.parseInt(poolsize);
      if (num <= 0) {
        // note: a non-positive value is logged but still applied
        logger.info("[SYNCHRONIZER] Parameter \"webharvester.poolSize\" less or equal to zero. No harvestig will be performed.");
      }
      cfg.setPoolSize(num);
    } catch (NumberFormatException ex) {
      logger.log(Level.INFO, "[SYNCHRONIZER] Invalid \"webharvester.poolSize\" parameter. Default {0} will be used instead.", HarvesterConfiguration.DEFAULT_POOL_SIZE);
      cfg.setPoolSize(HarvesterConfiguration.DEFAULT_POOL_SIZE);
    }
  } else {
    logger.log(Level.INFO, "[SYNCHRONIZER] Missing \"webharvester.poolSize\" parameter. Default {0} will be used instead.", HarvesterConfiguration.DEFAULT_POOL_SIZE);
    cfg.setPoolSize(HarvesterConfiguration.DEFAULT_POOL_SIZE);
  }
  // auto-select frequency: a time period string, with a constant default
  if (autoselectfrequency.length() > 0) {
    try {
      TimePeriod tp = TimePeriod.parseValue(autoselectfrequency);
      cfg.setAutoSelectFrequency(tp);
    } catch (NumberFormatException ex) {
      logger.log(Level.INFO, "[SYNCHRONIZER] Invalid \"webharvester.autoSelectFrequency\" parameter. Default {0} will be used instead.", HarvesterConfiguration.AUTOSELECT_FREQUENCY);
      cfg.setAutoSelectFrequency(new TimePeriod(HarvesterConfiguration.AUTOSELECT_FREQUENCY));
    }
  } else {
    logger.log(Level.INFO, "[SYNCHRONIZER] Missing \"webharvester.autoSelectFrequency\" parameter. Default {0} will be used instead.", HarvesterConfiguration.AUTOSELECT_FREQUENCY);
    cfg.setAutoSelectFrequency(new TimePeriod(HarvesterConfiguration.AUTOSELECT_FREQUENCY));
  }
  // watch-dog frequency: same pattern as above
  if (watchdogfrequency.length() > 0) {
    try {
      TimePeriod tp = TimePeriod.parseValue(watchdogfrequency);
      cfg.setWatchDogFrequency(tp);
    } catch (NumberFormatException ex) {
      logger.log(Level.INFO, "[SYNCHRONIZER] Invalid \"webharvester.watchDogFrequency\" parameter. Default {0} will be used instead.", HarvesterConfiguration.WATCHDOG_FREQUENCY);
      cfg.setWatchDogFrequency(new TimePeriod(HarvesterConfiguration.WATCHDOG_FREQUENCY));
    }
  } else {
    logger.log(Level.INFO, "[SYNCHRONIZER] Missing \"webharvester.watchDogFrequency\" parameter. Default {0} will be used instead.", HarvesterConfiguration.WATCHDOG_FREQUENCY);
    cfg.setWatchDogFrequency(new TimePeriod(HarvesterConfiguration.WATCHDOG_FREQUENCY));
  }
  if (basecontextpath.length() > 0) {
    cfg.setBaseContextPath(basecontextpath);
  } else {
    String reverseProxyPath = Val.chkStr(parameters.getValue("reverseProxy.baseContextPath"));
    if (reverseProxyPath.length() > 0) {
      // NOTE(review): the message says the reverseProxy value "will be used
      // instead", but nothing here assigns it to cfg — presumably the
      // fallback is applied elsewhere; confirm before relying on this.
      logger.info("[SYNCHRONIZER] Missing \"webharvester.baseContextPath\" parameter. Value of \"reverseProxy.baseContextPath\" will be used instead.");
    } else {
      logger.info("[SYNCHRONIZER] Missing \"webharvester.baseContextPath\" parameter. Harvest notification messages will be sent without information about harvest report.");
    }
  }
  // maximum records per harvest report; negative means "no limit"
  if (maxRepRecords.length() > 0) {
    try {
      long num = Long.parseLong(maxRepRecords);
      if (num < 0) {
        logger.info("[SYNCHRONIZER] Parameter \"webharvester.maxRepRecords\" less than zero. No limits will be set.");
      }
      cfg.setMaxRepRecords(num);
    } catch (NumberFormatException ex) {
      logger.log(Level.INFO, "[SYNCHRONIZER] Invalid \"webharvester.maxRepRecords\" parameter. Default {0} will be used instead.", HarvesterConfiguration.MAX_REP_RECORDS);
      cfg.setMaxRepRecords(HarvesterConfiguration.MAX_REP_RECORDS);
    }
  } else {
    logger.log(Level.INFO, "[SYNCHRONIZER] Missing \"webharvester.maxRepRecords\" parameter. Default {0} will be used instead.", HarvesterConfiguration.MAX_REP_RECORDS);
    cfg.setMaxRepRecords(HarvesterConfiguration.MAX_REP_RECORDS);
  }
  // maximum errors per harvest report; negative means "no limit"
  if (maxRepErrors.length() > 0) {
    try {
      long num = Long.parseLong(maxRepErrors);
      if (num < 0) {
        logger.info("[SYNCHRONIZER] Parameter \"webharvester.maxRepErrors\" less than zero. No limits will be set.");
      }
      cfg.setMaxRepErrors(num);
    } catch (NumberFormatException ex) {
      logger.log(Level.INFO, "[SYNCHRONIZER] Invalid \"webharvester.maxRepErrors\" parameter. Default {0} will be used instead.", HarvesterConfiguration.MAX_REP_ERRORS);
      cfg.setMaxRepErrors(HarvesterConfiguration.MAX_REP_ERRORS);
    }
  } else {
    logger.log(Level.INFO, "[SYNCHRONIZER] Missing \"webharvester.maxRepErrors\" parameter. Default {0} will be used instead.", HarvesterConfiguration.MAX_REP_ERRORS);
    cfg.setMaxRepErrors(HarvesterConfiguration.MAX_REP_ERRORS);
  }
  // auto-approval of newly registered resources
  if (resourceAutoApprove.length() > 0) {
    boolean bool = Val.chkBool(resourceAutoApprove,HarvesterConfiguration.RESOURCE_AUTOAPPROVE);
    cfg.setResourceAutoApprove(bool);
  } else {
    logger.log(Level.INFO, "[SYNCHRONIZER] Missing \"webharvester.resource.autoApprove\" parameter. Default {0} will be used instead.", HarvesterConfiguration.RESOURCE_AUTOAPPROVE);
    cfg.setResourceAutoApprove(HarvesterConfiguration.RESOURCE_AUTOAPPROVE);
  }
  // load data processor factories
  XPath xpath = XPathFactory.newInstance().newXPath();
  // add local data processor factory by default
  cfg.getDataProcessorFactories().add(new LocalDataProcessorFactory());
  // get root of webharvester configuration
  Node ndWebHarvester = (Node) xpath.evaluate("webharvester", root, XPathConstants.NODE);
  if (ndWebHarvester!=null) {
    // create and initialize a data processor factory for each entry;
    // entries default to enabled when the attribute is absent
    NodeList ndDataProcessorFactories = (NodeList) xpath.evaluate("dataProcessorFactory", ndWebHarvester, XPathConstants.NODESET);
    for (Node ndDataProcessorFactory : new NodeListAdapter(ndDataProcessorFactories)) {
      String className = Val.chkStr((String) xpath.evaluate("@className", ndDataProcessorFactory, XPathConstants.STRING));
      String name = Val.chkStr((String) xpath.evaluate("@name", ndDataProcessorFactory, XPathConstants.STRING));
      boolean enabled = Val.chkBool(Val.chkStr((String) xpath.evaluate("@enabled", ndDataProcessorFactory, XPathConstants.STRING)), true);
      if (enabled) {
        try {
          // reflective instantiation; a bad class name is logged, not fatal
          Class factoryClass = Class.forName(className);
          DataProcessorFactory processorFactory = (DataProcessorFactory) factoryClass.newInstance();
          processorFactory.setName(name);
          processorFactory.init(ndDataProcessorFactory);
          cfg.getDataProcessorFactories().add(processorFactory);
        } catch (Exception ex) {
          getLogger().log(Level.SEVERE, "Error creating processor factory: "+Val.stripControls(className), ex);
        }
      } else {
        // explicitly disabling the local factory removes the default added above
        if (LocalDataProcessorFactory.class.getCanonicalName().equals(className)) {
          removeDataProcessorFactory(cfg.getDataProcessorFactories(), className);
        }
      }
    }
  }
}
/**
 * Removes the first registered data processor factory whose class matches
 * the given canonical class name. No-op when no factory matches.
 *
 * @param factories the registered factories (modified in place)
 * @param factoryClassName the canonical class name of the factory to remove
 */
private void removeDataProcessorFactory(List<DataProcessorFactory> factories, String factoryClassName) {
  for (int i = 0; i < factories.size(); i++) {
    if (factories.get(i).getClass().getCanonicalName().equals(factoryClassName)) {
      factories.remove(i);
      return;
    }
  }
}
/**
 * Loads protocol factories.
 * <p>When a &lt;protocols&gt; node is present, the listed factories are
 * instantiated reflectively (optionally preceded by the defaults when
 * default="true"); when the node is absent, only the defaults are registered.
 * @param appConfig the primary application configuration
 * @param dom the configuration document
 * @param root the root node for the document
 * @throws XPathExpressionException indicates a programming error, bad XPath
 */
private void loadProtocolFactories(ApplicationConfiguration appConfig, Document dom, Node root) throws XPathExpressionException {
  ProtocolFactories factories = appConfig.getProtocolFactories();
  XPath xpath = XPathFactory.newInstance().newXPath();
  Node ndProtocols = (Node) xpath.evaluate("protocols", root, XPathConstants.NODE);
  if (ndProtocols!=null) {
    // check 'default' attribute of the 'protocols' node; if present and true
    // then initialize default factories before reading the explicit list
    boolean defaultValue = Val.chkBool((String) xpath.evaluate("@default", ndProtocols, XPathConstants.STRING),false);
    if (defaultValue) {
      factories.initDefault();
    }
    // initialize explicit protocol factories
    NodeList lstProtocol = (NodeList) xpath.evaluate("protocol", ndProtocols, XPathConstants.NODESET);
    for (Node ndProto : new NodeListAdapter(lstProtocol)) {
      String factoryClass = (String) xpath.evaluate("@factoryClass", ndProto, XPathConstants.STRING);
      try {
        // Class<?> instead of the raw Class type avoids unchecked warnings
        Class<?> fc = Class.forName(factoryClass);
        ProtocolFactory factory = (ProtocolFactory) fc.newInstance();
        ProtocolInitializer.init(factory, ndProto);
        String resourceKey = Val.chkStr((String) xpath.evaluate("@resourceKey", ndProto, XPathConstants.STRING));
        factories.put(factory.getName(), factory, resourceKey);
      } catch (Exception ex) {
        // a broken factory entry is logged and skipped, not fatal
        getLogger().log(Level.WARNING, "Error loading protocol: "+Val.stripControls(factoryClass), ex);
      }
      // each protocol may optionally declare a validator factory
      String validatorFactoryClass = Val.chkStr((String) xpath.evaluate("validator/@factoryClass", ndProto, XPathConstants.STRING));
      if (!validatorFactoryClass.isEmpty()) {
        try {
          Class<?> fc = Class.forName(validatorFactoryClass);
          IValidatorFactory factory = (IValidatorFactory) fc.newInstance();
          ValidatorFactory.register(factory);
        } catch (Exception ex) {
          getLogger().log(Level.WARNING, "Error loading protocol validator factory: "+Val.stripControls(validatorFactoryClass), ex);
        }
      }
    }
  } else {
    // no <protocols> element at all: register the built-in defaults
    factories.initDefault();
  }
}
/**
 * Reads the parameters under a parent node.
 *
 * @param parameters
 *          the parameters to populate
 * @param parent
 *          the parent node containing the parameters to read
 * @throws XPathExpressionException
 *           indicates a programming error, bad XPath
 */
private void populateParameters(StringAttributeMap parameters, Node parent)
    throws XPathExpressionException {
  XPath xpath = XPathFactory.newInstance().newXPath();
  NodeList parameterNodes = (NodeList) xpath.evaluate("parameter", parent,
      XPathConstants.NODESET);
  int count = parameterNodes.getLength();
  for (int idx = 0; idx < count; idx++) {
    Node parameterNode = parameterNodes.item(idx);
    String key = Val.chkStr(xpath.evaluate("@key", parameterNode));
    String value = Val.chkStr(xpath.evaluate("@value", parameterNode));
    // parameters without a key are silently ignored
    if (key.length() == 0) {
      continue;
    }
    parameters.add(new StringAttribute(key, value));
  }
}
/**
 * Safely parses time period giving default value if time period can not be parsed.
 * @param periodDef period definition to parse
 * @param defaultValue default value if period definition cannot be parsed
 * @return time period
 */
private TimePeriod parsePeriod(String periodDef, long defaultValue) {
  String definition = Val.chkStr(periodDef);
  // blank definition: fall straight back to the supplied default
  if (definition.isEmpty()) {
    return new TimePeriod(defaultValue);
  }
  try {
    return TimePeriod.parseValue(definition);
  } catch (IllegalArgumentException ex) {
    // unparsable definition: fall back to the supplied default
    return new TimePeriod(defaultValue);
  }
}
}
| |
/*
// Licensed to DynamoBI Corporation (DynamoBI) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. DynamoBI licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
*/
package org.eigenbase.applib.datetime;
import java.sql.Date;
import java.util.Calendar;
import java.util.GregorianCalendar;
import org.eigenbase.applib.resource.*;
/**
* Internal helper class for Time Dimension UDX Ported from
* //bb/bb713/server/Java/Broadbase/TimeDimensionInternal.java
*
* @author Elizabeth Lin
* @version $Id$
*/
public class TimeDimensionInternal
extends GregorianCalendar
{
//~ Instance fields --------------------------------------------------------
// start of the dimension as normalized Calendar fields (month is 0-based,
// per Calendar.MONTH; see the main constructor)
private int startYear;
private int startMonth;
private int startDate;
// number of days between the start and end dates (rounded from millis)
private int numDays;
// 0-based month on which the fiscal year starts (input is 1-based, -1 applied
// in the constructor)
private int fiscalYearStartMonth;
// day number in year when the current quarter starts
private int quarterStartDay;
// day number in year when the current fiscal quarter starts
private int fiscalQuarterStartDay;
// day number in year when the current fiscal year starts
private int fiscalYearStartDay;
// the dimension's cursor date and the derived first/last-of-period dates;
// all are populated by Start() and mutated in place by increment()
private Date currentDate;
private Date firstOfWeekDate;
private Date lastOfWeekDate;
private Date firstOfMonthDate;
private Date lastOfMonthDate;
private Date firstOfQuarterDate;
private Date lastOfQuarterDate;
private Date firstOfYearDate;
private Date lastOfYearDate;
private Date firstOfFiscalQuarterDate;
private Date lastOfFiscalQuarterDate;
private Date firstOfFiscalYearDate;
private Date lastOfFiscalYearDate;
private Date firstOfFiscalWeekDate;
private Date lastOfFiscalWeekDate;
// milliseconds per day (86 400 000); used for day-count arithmetic
final int millisInADay = 1000 * 60 * 60 * 24;
//~ Constructors -----------------------------------------------------------
public TimeDimensionInternal()
{
return;
}
/**
 * Builds a time dimension spanning the given start and end dates.
 * Month and date inputs are 1-based; they are converted to Calendar's
 * 0-based months for the superclass.
 *
 * @param startYear start year
 * @param startMonth start month, 1-12
 * @param startDate start day of month, 1-31
 * @param endYear end year
 * @param endMonth end month, 1-12
 * @param endDate end day of month, 1-31
 * @param fiscalYearStartMonth 1-based month on which the fiscal year starts
 * @throws ApplibException when a date component is out of range or the
 *         start date follows the end date
 */
public TimeDimensionInternal(
    int startYear,
    int startMonth,
    int startDate,
    int endYear,
    int endMonth,
    int endDate,
    int fiscalYearStartMonth)
    throws ApplibException
{
    // construct superclass
    // (validation below runs after super() has already consumed the values)
    super(startYear, startMonth - 1, startDate);
    ApplibResource res = ApplibResource.instance();
    // NOTE(review): a month or date of 0 passes these checks (only < 0 is
    // rejected) even though inputs appear to be meant as 1-based — confirm.
    if ((startMonth < 0)
        || (startDate < 0)
        || (startMonth > 12)
        || (startDate > 31))
    {
        throw res.TimeDimInvalidStartDate.ex();
    }
    if ((endMonth < 0)
        || (endDate < 0)
        || (endMonth > 12)
        || (endDate > 31))
    {
        throw res.TimeDimInvalidEndDate.ex();
    }
    if ((fiscalYearStartMonth < 0) || (fiscalYearStartMonth > 12)) {
        throw res.TimeDimInvalidFiscalStartMonth.ex();
    }
    long start = getTimeInMillis();
    complete();
    // capture the normalized (0-based) calendar fields of the start date
    this.startMonth = get(Calendar.MONTH);
    this.startYear = get(Calendar.YEAR);
    this.startDate = get(Calendar.DATE);
    set(endYear, endMonth - 1, endDate);
    complete();
    long end = getTimeInMillis();
    if (start > end) {
        throw res.TimeDimStartDayMustPrecedeEndDay.ex();
    }
    // store the fiscal start month 0-based to match Calendar.MONTH
    this.fiscalYearStartMonth = fiscalYearStartMonth - 1;
    // rounding guards against DST-shortened/-lengthened days (cf. WeekFrom)
    this.numDays = (int) Math.round((double) (end - start) / millisInADay);
    // set back to known state
    set(this.startYear, this.startMonth, this.startDate);
    complete();
}
//~ Methods ----------------------------------------------------------------
/**
 * Maps a 0-based day number to its English name (0 = Sunday).
 *
 * @param day day number, 0 through 6
 * @return the English day name
 * @throws ApplibException when day is outside 0..6
 */
public static String getDayOfWeek(int day)
    throws ApplibException
{
    if ((day < 0) || (day > 6)) {
        throw ApplibResource.instance().TimeDimInvalidDayOfWeek.ex();
    }
    String [] dayNames = {
        "Sunday", "Monday", "Tuesday", "Wednesday",
        "Thursday", "Friday", "Saturday"
    };
    return dayNames[day];
}
/**
 * Maps a 0-based month number to its English name (0 = January).
 *
 * @param month month number, 0 through 11
 * @return the English month name
 * @throws ApplibException when month is outside 0..11
 */
public static String getMonthOfYear(int month)
    throws ApplibException
{
    if ((month < 0) || (month > 11)) {
        throw ApplibResource.instance().TimeDimInvalidMonthOfYear.ex();
    }
    String [] monthNames = {
        "January", "February", "March", "April",
        "May", "June", "July", "August",
        "September", "October", "November", "December"
    };
    return monthNames[month];
}
/**
 * Initializes the dimension's cursor and every derived first/last-of-period
 * date (week, month, quarter, year and their fiscal counterparts) for the
 * configured start date. Must be called before iterating with increment().
 * <p>The method works by repeatedly mutating this calendar's fields and
 * snapshotting getTimeInMillis(); the calendar is always restored to the
 * start date between phases, and ends positioned on the start date.
 */
public void Start()
{
    // set last date of month
    set(
        this.startYear,
        this.startMonth,
        getActualMaximum(Calendar.DAY_OF_MONTH));
    complete();
    this.lastOfMonthDate = new Date(getTimeInMillis());
    // set first date of month
    set(
        this.startYear,
        this.startMonth,
        getActualMinimum(Calendar.DAY_OF_MONTH));
    complete();
    this.firstOfMonthDate = new Date(getTimeInMillis());
    // set last date of quarter (quarters are Jan-Mar, Apr-Jun, ...)
    add(Calendar.MONTH, 2 - (this.startMonth % 3));
    set(Calendar.DAY_OF_MONTH, getActualMaximum(Calendar.DAY_OF_MONTH));
    complete();
    this.lastOfQuarterDate = new Date(getTimeInMillis());
    // set first date, week, day of quarter
    // set to minimum day first, so we won't get something unexpected from
    // rolling back 2 months
    set(Calendar.DAY_OF_MONTH, getActualMinimum(Calendar.DAY_OF_MONTH));
    complete();
    add(Calendar.MONTH, -2);
    set(Calendar.DAY_OF_MONTH, getActualMinimum(Calendar.DAY_OF_MONTH));
    complete();
    this.firstOfQuarterDate = new Date(getTimeInMillis());
    this.quarterStartDay = get(Calendar.DAY_OF_YEAR);
    // set first date of year
    set(Calendar.MONTH, getActualMinimum(Calendar.MONTH));
    set(Calendar.DAY_OF_MONTH, getActualMinimum(Calendar.DAY_OF_MONTH));
    complete();
    this.firstOfYearDate = new Date(getTimeInMillis());
    // set last date of year
    set(Calendar.DAY_OF_YEAR, getActualMaximum(Calendar.DAY_OF_YEAR));
    complete();
    this.lastOfYearDate = new Date(getTimeInMillis());
    // set first date, week, day of fiscal quarter
    set(this.startYear, this.startMonth, this.startDate);
    complete();
    // set to minimum day first
    set(Calendar.DAY_OF_MONTH, getActualMinimum(Calendar.DAY_OF_MONTH));
    complete();
    // fMth = 0-based month offset within the fiscal year
    int fMth = (this.startMonth - this.fiscalYearStartMonth + 12) % 12;
    add(Calendar.MONTH, -(fMth % 3));
    set(Calendar.DAY_OF_MONTH, getActualMinimum(Calendar.DAY_OF_MONTH));
    complete();
    this.firstOfFiscalQuarterDate = new Date(getTimeInMillis());
    this.fiscalQuarterStartDay = get(Calendar.DAY_OF_YEAR);
    // set last date of fiscal quarter
    add(Calendar.MONTH, 2);
    set(Calendar.DAY_OF_MONTH, getActualMaximum(Calendar.DAY_OF_MONTH));
    complete();
    this.lastOfFiscalQuarterDate = new Date(getTimeInMillis());
    // set first date, week, day of fiscal year
    set(this.startYear, this.startMonth, this.startDate);
    complete();
    set(Calendar.MONTH, this.fiscalYearStartMonth);
    complete();
    set(Calendar.DAY_OF_MONTH, getActualMinimum(Calendar.DAY_OF_MONTH));
    complete();
    // a start month before the fiscal start month belongs to the fiscal
    // year that began in the previous calendar year
    if (this.startMonth < this.fiscalYearStartMonth) {
        add(Calendar.YEAR, -1);
    }
    this.firstOfFiscalYearDate = new Date(getTimeInMillis());
    this.fiscalYearStartDay = get(Calendar.DAY_OF_YEAR);
    // set last date of fiscal year
    add(Calendar.MONTH, 11);
    set(Calendar.DAY_OF_MONTH, getActualMaximum(Calendar.DAY_OF_MONTH));
    complete();
    this.lastOfFiscalYearDate = new Date(getTimeInMillis());
    // set calendar back to start date
    set(this.startYear, this.startMonth, this.startDate);
    complete();
    // set first dates for week, clamped to the start of the calendar year
    int daysPastFirst = get(Calendar.DAY_OF_WEEK) - getFirstDayOfWeek();
    if (daysPastFirst < 0) {
        daysPastFirst += 7;
    }
    add(Calendar.DAY_OF_MONTH, -(daysPastFirst));
    long firstOfWeek = getTimeInMillis();
    this.firstOfWeekDate = new Date(firstOfWeek);
    if (this.firstOfWeekDate.before(this.firstOfYearDate)) {
        this.firstOfWeekDate.setTime(this.firstOfYearDate.getTime());
    }
    // set calendar back to start date
    set(this.startYear, this.startMonth, this.startDate);
    complete();
    // set last dates for week, clamped to the end of the calendar year
    int daysToLast = 6 - daysPastFirst;
    add(Calendar.DAY_OF_MONTH, daysToLast);
    long lastOfWeek = getTimeInMillis();
    this.lastOfWeekDate = new Date(lastOfWeek);
    if (this.lastOfWeekDate.after(this.lastOfYearDate)) {
        this.lastOfWeekDate.setTime(this.lastOfYearDate.getTime());
    }
    // set calendar back to start date
    set(this.startYear, this.startMonth, this.startDate);
    complete();
    // set first/last fiscal dates of week, clamped to the fiscal year
    this.firstOfFiscalWeekDate = new Date(firstOfWeek);
    if (this.firstOfFiscalWeekDate.before(this.firstOfFiscalYearDate)) {
        this.firstOfFiscalWeekDate.setTime(
            this.firstOfFiscalYearDate.getTime());
    }
    this.lastOfFiscalWeekDate = new Date(lastOfWeek);
    if (this.lastOfFiscalWeekDate.after(this.lastOfFiscalYearDate)) {
        // FIX: clamp to the END of the fiscal year. This previously used
        // firstOfFiscalYearDate — a copy/paste slip; compare the
        // calendar-week clamp above, which uses lastOfYearDate.
        this.lastOfFiscalWeekDate.setTime(
            this.lastOfFiscalYearDate.getTime());
    }
    this.currentDate = new Date(getTimeInMillis());
}
/**
 * Returns the first day of the current week (clamped to the calendar year).
 * Note: all Date getters below return the internal instance, which
 * increment() mutates in place — copy if you need a stable value.
 */
public Date getFirstDayOfWeekDate()
{
    return this.firstOfWeekDate;
}
/** Returns the last day of the current week (clamped to the calendar year). */
public Date getLastDayOfWeekDate()
{
    return this.lastOfWeekDate;
}
/** Returns the first day of the current month. */
public Date getFirstDayOfMonthDate()
{
    return this.firstOfMonthDate;
}
/** Returns the last day of the current month. */
public Date getLastDayOfMonthDate()
{
    return this.lastOfMonthDate;
}
/** Returns the first day of the current calendar quarter. */
public Date getFirstDayOfQuarterDate()
{
    return this.firstOfQuarterDate;
}
/** Returns the last day of the current calendar quarter. */
public Date getLastDayOfQuarterDate()
{
    return this.lastOfQuarterDate;
}
/** Returns the first day of the current calendar year. */
public Date getFirstDayOfYearDate()
{
    return this.firstOfYearDate;
}
/** Returns the last day of the current calendar year. */
public Date getLastDayOfYearDate()
{
    return this.lastOfYearDate;
}
/** Returns Calendar.DAY_OF_WEEK of the cursor (1 = Sunday .. 7 = Saturday). */
public int getDayOfWeek()
{
    return get(Calendar.DAY_OF_WEEK);
}
/** Returns the total number of days spanned by this dimension. */
public int getNumDays()
{
    return this.numDays;
}
/** Returns the calendar year of the cursor. */
public int getYear()
{
    return get(Calendar.YEAR);
}
/**
 * Returns the day count since the 1970 epoch.
 * NOTE(review): integer division truncates toward zero, so the extra
 * decrement for years before 1970 appears to floor pre-epoch values —
 * confirm intended behavior at the exact year boundary.
 */
public int getJulianDay()
{
    int ret = (int) (getTimeInMillis() / millisInADay);
    if (getYear() < 1970) {
        ret--;
    }
    return ret;
}
/** Returns the 1-based month of the cursor (Calendar.MONTH is 0-based). */
public int getMonth()
{
    return get(Calendar.MONTH) + 1;
}
/** Returns the day of month of the cursor. */
public int getDayOfMonth()
{
    return get(Calendar.DAY_OF_MONTH);
}
/** Returns the 1-based day number within the current calendar quarter. */
public int getDayOfQuarter()
{
    return get(Calendar.DAY_OF_YEAR) - this.quarterStartDay + 1;
}
/** Returns the day of year of the cursor. */
public int getDayOfYear()
{
    return get(Calendar.DAY_OF_YEAR);
}
/** Returns the 1-based week number within the current calendar quarter. */
public int getWeekOfQuarter()
{
    return WeekFrom(this.firstOfQuarterDate);
}
/** Returns Calendar.WEEK_OF_MONTH for the cursor. */
public int getWeekOfMonth()
{
    return get(Calendar.WEEK_OF_MONTH);
}
/** Returns the 1-based week number within the current calendar year. */
public int getWeek()
{
    return WeekFrom(this.firstOfYearDate);
}
/**
 * Returns the cursor date. This is the internal instance that increment()
 * advances in place.
 */
public Date getDate()
{
    return currentDate;
}
// this helper function assumes startDate < currentDate
// and returns week number of current date with respect to startDate
/**
 * Computes the 1-based week number of the cursor counted from startDate:
 * one week per full 7 days elapsed, plus one more when the cursor's
 * day-of-week indicates a week boundary was crossed within the partial week.
 *
 * @param startDate the period start (must not be after the cursor)
 * @return the 1-based week number
 */
private int WeekFrom(Date startDate)
{
    long startTime = startDate.getTime();
    // this rounding is for day light saving time in the US
    long days =
        Math.round((double) (getTimeInMillis() - startTime) / millisInADay);
    return (int) (1 + (days / 7)
        + ((get(Calendar.DAY_OF_WEEK) <= (days % 7)) ? 1 : 0));
}
/**
 * Returns the week number within the current month.
 * NOTE(review): despite the "fiscal" name this counts from
 * firstOfMonthDate (the calendar month start) — confirm that fiscal and
 * calendar months are meant to share boundaries here.
 */
public int getWeekOfFiscalMonth()
{
    return WeekFrom(this.firstOfMonthDate);
}
/** Returns the 1-based month number within the fiscal year. */
public int getFiscalMonth()
{
    return ((get(Calendar.MONTH) - this.fiscalYearStartMonth + 12) % 12)
        + 1;
}
/**
 * Returns the 1-based day number within the current fiscal quarter.
 * When the fiscal quarter started in the previous calendar year, the
 * previous year's length is consulted; the calendar is saved and restored
 * around that lookup.
 */
public int getDayOfFiscalQuarter()
{
    int doy = get(Calendar.DAY_OF_YEAR);
    int dofq;
    if (doy >= this.fiscalQuarterStartDay) {
        dofq = doy - this.fiscalQuarterStartDay + 1;
    } else {
        long tempTime = getTimeInMillis();
        add(Calendar.YEAR, -1);
        dofq =
            getActualMaximum(Calendar.DAY_OF_YEAR)
            - this.fiscalQuarterStartDay + doy + 1;
        setTimeInMillis(tempTime);
        complete();
    }
    return dofq;
}
/** Returns the 1-based week number within the current fiscal quarter. */
public int getWeekOfFiscalQuarter()
{
    return WeekFrom(this.firstOfFiscalQuarterDate);
}
/** Returns the 1-based fiscal quarter number (1..4). */
public int getFiscalQuarter()
{
    return ((this.getFiscalMonth() - 1) / 3) + 1;
}
/**
 * Returns the 1-based day number within the current fiscal year.
 * When the fiscal year started in the previous calendar year, the previous
 * year's length is consulted; the calendar is saved and restored around
 * that lookup.
 */
public int getDayOfFiscalYear()
{
    int doy = get(Calendar.DAY_OF_YEAR);
    int dofy;
    if (doy >= this.fiscalYearStartDay) {
        dofy = doy - this.fiscalYearStartDay + 1;
    } else {
        long tempTime = getTimeInMillis();
        add(Calendar.YEAR, -1);
        dofy =
            getActualMaximum(Calendar.DAY_OF_YEAR)
            - this.fiscalYearStartDay + doy + 1;
        setTimeInMillis(tempTime);
        complete();
    }
    return dofy;
}
/** Returns the 1-based week number within the current fiscal year. */
public int getWeekOfFiscalYear()
{
    return WeekFrom(this.firstOfFiscalYearDate);
}
/**
 * Returns the fiscal year label for the cursor.
 */
public int getFiscalYear()
{
    // The fiscal year is referred to by the date in which it ends.
    // For example, if a company's fiscal year ends October 31, 2006, then
    // everything between November 1, 2005 and October 31, 2006 would be
    // referred to as FY 2006
    if ((this.fiscalYearStartMonth == Calendar.JANUARY)
        || (get(Calendar.MONTH) < this.fiscalYearStartMonth))
    {
        return get(Calendar.YEAR);
    } else {
        return get(Calendar.YEAR) + 1;
    }
}
/**
 * Returns the first day of the current fiscal week. As with the other Date
 * getters, the internal instance is returned and increment() mutates it.
 */
public Date getFirstDayOfFiscalWeekDate()
{
    return this.firstOfFiscalWeekDate;
}
/** Returns the last day of the current fiscal week. */
public Date getLastDayOfFiscalWeekDate()
{
    return this.lastOfFiscalWeekDate;
}
/** Returns the first day of the current fiscal quarter. */
public Date getFirstDayOfFiscalQuarterDate()
{
    return this.firstOfFiscalQuarterDate;
}
/** Returns the last day of the current fiscal quarter. */
public Date getLastDayOfFiscalQuarterDate()
{
    return this.lastOfFiscalQuarterDate;
}
/** Returns the first day of the current fiscal year. */
public Date getFirstDayOfFiscalYearDate()
{
    return this.firstOfFiscalYearDate;
}
/** Returns the last day of the current fiscal year. */
public Date getLastDayOfFiscalYearDate()
{
    return this.lastOfFiscalYearDate;
}
/**
 * Advances this calendar by one day and refreshes every cached period
 * boundary (first/last day of week, month, quarter, and year, in both
 * calendar and fiscal terms).
 *
 * <p>Month/quarter/year boundaries are only recomputed on the first day
 * of a month; week boundaries are recomputed on the configured first day
 * of the week, or on the first day of a (fiscal) year, which starts a
 * new — possibly short — week.
 *
 * <p>Each recomputation temporarily moves this calendar to the period's
 * end and must restore it to {@code currentTime} afterwards; a missing
 * restore in the fiscal-week branch was fixed below.
 */
public void increment()
{
    add(Calendar.DATE, 1);
    complete();
    long currentTime = getTimeInMillis();
    currentDate.setTime(currentTime);
    boolean isFirstDayOfYear = false;
    boolean isFiscalFirstDayOfYear = false;
    // update first/last day of month/quarter/year
    if (get(Calendar.DAY_OF_MONTH) == 1) {
        int month = get(Calendar.MONTH);
        this.firstOfMonthDate.setTime(getTimeInMillis());
        set(Calendar.DAY_OF_MONTH, getActualMaximum(Calendar.DAY_OF_MONTH));
        complete();
        this.lastOfMonthDate.setTime(getTimeInMillis());
        setTimeInMillis(currentTime);
        complete();
        if ((month % 3) == 0) {
            if (month == 0) {
                isFirstDayOfYear = true;
                this.firstOfYearDate.setTime(currentTime);
                set(
                    Calendar.DAY_OF_YEAR,
                    getActualMaximum(Calendar.DAY_OF_YEAR));
                complete();
                this.lastOfYearDate.setTime(getTimeInMillis());
                setTimeInMillis(currentTime);
                complete();
            }
            this.firstOfQuarterDate.setTime(currentTime);
            this.quarterStartDay = get(Calendar.DAY_OF_YEAR);
            // Quarter ends on the last day of the month two months ahead
            add(Calendar.MONTH, 2);
            set(
                Calendar.DAY_OF_MONTH,
                getActualMaximum(Calendar.DAY_OF_MONTH));
            complete();
            this.lastOfQuarterDate.setTime(getTimeInMillis());
            setTimeInMillis(currentTime);
            complete();
        }
        // Month index relative to the start of the fiscal year
        int fMonth = (month - this.fiscalYearStartMonth + 12) % 12;
        if ((fMonth % 3) == 0) {
            if (fMonth == 0) {
                isFiscalFirstDayOfYear = true;
                this.firstOfFiscalYearDate.setTime(currentTime);
                this.fiscalYearStartDay = get(Calendar.DAY_OF_YEAR);
                add(Calendar.YEAR, 1);
                // Fiscal year ends one day before the same date next year
                this.lastOfFiscalYearDate.setTime(
                    getTimeInMillis() - millisInADay);
                setTimeInMillis(currentTime);
                complete();
            }
            this.firstOfFiscalQuarterDate.setTime(currentTime);
            this.fiscalQuarterStartDay = get(Calendar.DAY_OF_YEAR);
            add(Calendar.MONTH, 2);
            set(
                Calendar.DAY_OF_MONTH,
                getActualMaximum(Calendar.DAY_OF_MONTH));
            complete();
            this.lastOfFiscalQuarterDate.setTime(getTimeInMillis());
            setTimeInMillis(currentTime);
            complete();
        }
    }
    // update first/last day of week
    int currentDayOfWeek = get(Calendar.DAY_OF_WEEK);
    if (isFirstDayOfYear) {
        this.firstOfWeekDate.setTime(currentTime);
        int lastDayOfWeek = (((getFirstDayOfWeek() - 1) + 6) % 7) + 1;
        if (lastDayOfWeek >= currentDayOfWeek) {
            add(Calendar.DAY_OF_MONTH, (lastDayOfWeek - currentDayOfWeek));
            this.lastOfWeekDate.setTime(getTimeInMillis());
            // reset calendar back to current time
            setTimeInMillis(currentTime);
            complete();
        } else {
            add(
                Calendar.DAY_OF_MONTH,
                (lastDayOfWeek - currentDayOfWeek + 7));
            this.lastOfWeekDate.setTime(getTimeInMillis());
            // reset calendar back to current time
            setTimeInMillis(currentTime);
            complete();
        }
    } else if (getFirstDayOfWeek() == currentDayOfWeek) {
        this.firstOfWeekDate.setTime(currentTime);
        add(Calendar.DAY_OF_MONTH, 6);
        this.lastOfWeekDate.setTime(getTimeInMillis());
        // reset calendar back to current time
        setTimeInMillis(currentTime);
        complete();
        // if last day of week is into the new year, set it to the last
        // day of the year
        if (this.lastOfWeekDate.after(this.lastOfYearDate)) {
            this.lastOfWeekDate.setTime(this.lastOfYearDate.getTime());
        }
    }
    // update first/last fiscal day of week
    if (isFiscalFirstDayOfYear) {
        this.firstOfFiscalWeekDate.setTime(currentTime);
        int lastDayOfWeek = (((getFirstDayOfWeek() - 1) + 6) % 7) + 1;
        if (lastDayOfWeek >= currentDayOfWeek) {
            add(Calendar.DAY_OF_MONTH, (lastDayOfWeek - currentDayOfWeek));
            this.lastOfFiscalWeekDate.setTime(getTimeInMillis());
            // reset calendar back to current time
            setTimeInMillis(currentTime);
            complete();
        } else {
            add(
                Calendar.DAY_OF_MONTH,
                (lastDayOfWeek - currentDayOfWeek + 7));
            this.lastOfFiscalWeekDate.setTime(getTimeInMillis());
            // BUGFIX: reset calendar back to current time. This branch
            // previously omitted the reset, leaving the calendar
            // advanced to the end of the fiscal week and corrupting
            // every subsequent increment (compare the three parallel
            // week branches above, which all reset).
            setTimeInMillis(currentTime);
            complete();
        }
    } else if (getFirstDayOfWeek() == currentDayOfWeek) {
        this.firstOfFiscalWeekDate.setTime(currentTime);
        add(Calendar.DAY_OF_MONTH, 6);
        this.lastOfFiscalWeekDate.setTime(getTimeInMillis());
        // reset calendar back to current time
        setTimeInMillis(currentTime);
        complete();
        // clamp the fiscal week end to the fiscal year end
        if (this.lastOfFiscalWeekDate.after(this.lastOfFiscalYearDate)) {
            this.lastOfFiscalWeekDate.setTime(
                this.lastOfFiscalYearDate.getTime());
        }
    }
}
}
// End TimeDimensionInternal.java
| |
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.dalvik;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.FieldVisitor;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Array;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import javax.annotation.Nullable;
/**
 * Tool to get stats about dalvik classes.
 *
 * <p>Visits a compiled Java class with ASM and estimates (a) the number
 * of bytes the class will consume in Dalvik's LinearAlloc buffer and
 * (b) the set of methods the class references, which counts against the
 * dex method-reference limit.
 */
public class DalvikStatsTool {

  /** Utility class: do not instantiate */
  private DalvikStatsTool() {}

  // Reasonable defaults based on dreiss's observations.
  // A class whose own name or superclass name matches one of these
  // patterns is assumed to inherit the corresponding (large) vtable.
  private static final ImmutableMap<Pattern, Integer> PENALTIES =
      ImmutableMap.<Pattern, Integer>builder()
          .put(Pattern.compile("Layout$"), 1500)
          .put(Pattern.compile("View$"), 1500)
          .put(Pattern.compile("ViewGroup$"), 1800)
          .put(Pattern.compile("Activity$"), 1100)
          .build();

  // DX translates MULTIANEWARRAY into a method call that matches this (owner,name,desc)
  private static final String MULTIARRAY_OWNER = Type.getType(Array.class).getInternalName();
  private static final String MULTIARRAY_NAME = "newInstance";
  private static final String MULTIARRAY_DESC = Type.getMethodType(
      Type.getType(Object.class),
      Type.getType(Class.class),
      Type.getType("[" + Type.INT_TYPE.getDescriptor())).getDescriptor();

  /**
   * A (className, methodName, methodDesc) triple identifying one method
   * reference, with value equality so references can be de-duplicated
   * in a set. Any field may be null.
   */
  public static class MethodReference {
    public final String className;
    public final String methodName;
    public final String methodDesc;

    public MethodReference(String className, String methodName, String methodDesc) {
      this.className = className;
      this.methodName = methodName;
      this.methodDesc = methodDesc;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (!(o instanceof MethodReference)) {
        return false;
      }
      MethodReference that = (MethodReference) o;
      // Null-safe comparison of all three identifying fields.
      return Objects.equals(className, that.className)
          && Objects.equals(methodName, that.methodName)
          && Objects.equals(methodDesc, that.methodDesc);
    }

    @Override
    public int hashCode() {
      // Same 31-based combination as the hand-rolled original, written
      // with null-safe Objects.hashCode; produces identical values.
      int result = Objects.hashCode(className);
      result = 31 * result + Objects.hashCode(methodName);
      result = 31 * result + Objects.hashCode(methodDesc);
      return result;
    }

    @Override
    public String toString() {
      return className + "." + methodName + ":" + methodDesc;
    }
  }

  /**
   * Stats about a java class.
   */
  public static class Stats {
    public static final Stats ZERO = new Stats(0, ImmutableSet.<MethodReference>of());

    /** Estimated bytes the class will contribute to Dalvik linear alloc. */
    public final int estimatedLinearAllocSize;

    /** Methods referenced by the class. */
    public final ImmutableSet<MethodReference> methodReferences;

    public Stats(int estimatedLinearAllocSize, Set<MethodReference> methodReferences) {
      this.estimatedLinearAllocSize = estimatedLinearAllocSize;
      this.methodReferences = ImmutableSet.copyOf(methodReferences);
    }
  }

  /**
   * CLI wrapper to run against every class in a set of JARs.
   *
   * @param args paths of JAR files whose classes should be measured
   * @throws IOException if a JAR or class entry cannot be read
   */
  public static void main(String[] args) throws IOException {
    for (String fname : args) {
      try (ZipFile inJar = new ZipFile(fname)) {
        for (ZipEntry entry : Collections.list(inJar.entries())) {
          if (!entry.getName().endsWith(".class")) {
            continue;
          }
          // Entry streams are released when the enclosing ZipFile closes.
          InputStream rawClass = inJar.getInputStream(entry);
          int footprint = getEstimate(rawClass).estimatedLinearAllocSize;
          System.out.println(footprint + "\t" + entry.getName().replace(".class", ""));
        }
      }
    }
  }

  /**
   * Estimates the footprint that a given class will have in the LinearAlloc buffer
   * of Android's Dalvik VM.
   *
   * @param rawClass Raw bytes of the Java class to analyze.
   * @return the estimate
   * @throws IOException if the class bytes cannot be read
   */
  public static Stats getEstimate(InputStream rawClass) throws IOException {
    ClassReader classReader = new ClassReader(rawClass);
    return getEstimateInternal(classReader);
  }

  /**
   * Estimates the footprint that a given class will have in the LinearAlloc buffer
   * of Android's Dalvik VM.
   *
   * @param classReader reader containing the Java class to analyze.
   * @return the estimate
   */
  @VisibleForTesting
  static Stats getEstimateInternal(ClassReader classReader) throws IOException {
    // SKIP_FRAMES was required to avoid an exception in ClassReader when running on proguard
    // output. We don't need to visit frames so this isn't an issue.
    StatsClassVisitor statsVisitor = new StatsClassVisitor(PENALTIES);
    classReader.accept(statsVisitor, ClassReader.SKIP_FRAMES);
    return new Stats(
        statsVisitor.footprint,
        statsVisitor.methodReferenceBuilder.build());
  }

  /**
   * ASM class visitor that accumulates the LinearAlloc size estimate and
   * the set of referenced methods while walking one class.
   */
  private static class StatsClassVisitor extends ClassVisitor {
    private final ImmutableMap<Pattern, Integer> penalties;
    private final MethodVisitor methodVisitor = new StatsMethodVisitor();
    private int footprint;
    private boolean isInterface;
    private ImmutableSet.Builder<MethodReference> methodReferenceBuilder;
    private String className;

    private StatsClassVisitor(Map<Pattern, Integer> penalties) {
      super(Opcodes.ASM4);
      this.penalties = ImmutableMap.copyOf(penalties);
      this.methodReferenceBuilder = ImmutableSet.builder();
    }

    @Override
    public void visit(
        int version,
        int access,
        String name,
        String signature,
        String superName,
        String[] interfaces) {
      this.className = name;
      if ((access & (Opcodes.ACC_INTERFACE)) != 0) {
        // Interfaces don't have vtables.
        // This might undercount annotations, but they are mostly small.
        isInterface = true;
      } else {
        // Some parent classes have big vtable footprints. We try to estimate the parent vtable
        // size based on the name of the class and parent class. This seems to work reasonably
        // well in practice because the heaviest vtables are View and Activity, and many of those
        // classes have clear names and cannot be obfuscated.
        // Non-interfaces inherit the java.lang.Object vtable, which is 48 bytes.
        int vtablePenalty = 48;
        String[] names = new String[]{name, superName};
        for (Map.Entry<Pattern, Integer> entry : penalties.entrySet()) {
          for (String cls : names) {
            if (entry.getKey().matcher(cls).find()) {
              vtablePenalty = Math.max(vtablePenalty, entry.getValue());
            }
          }
        }
        footprint += vtablePenalty;
      }
    }

    @Override
    @Nullable
    public FieldVisitor visitField(
        int access, String name, String desc, String signature, Object value) {
      // For non-static fields, Field objects are 16 bytes.
      if ((access & Opcodes.ACC_STATIC) == 0) {
        footprint += 16;
      }
      return null;
    }

    @Override
    @Nullable
    public MethodVisitor visitMethod(
        int access, String name, String desc, String signature, String[] exceptions) {
      // Method objects are 52 bytes.
      footprint += 52;
      // For non-interfaces, each virtual method adds another 4 bytes to the vtable.
      if (!isInterface) {
        boolean isDirect =
            ((access & (Opcodes.ACC_PRIVATE | Opcodes.ACC_STATIC)) != 0) ||
            name.equals("<init>");
        if (!isDirect) {
          footprint += 4;
        }
      }
      methodReferenceBuilder.add(new MethodReference(className, name, desc));
      return methodVisitor;
    }

    @Override
    public void visitOuterClass(String owner, String name, String desc) {
      super.visitOuterClass(owner, name, desc);
      // Record the enclosing method of this (anonymous/local) class.
      // NOTE(review): this registers the reference against className
      // rather than owner (the enclosing class); confirm that is the
      // intended accounting before changing it.
      if (name != null) {
        methodReferenceBuilder.add(new MethodReference(className, name, desc));
      }
    }

    /** Records every method invocation (and MULTIANEWARRAY) as a reference. */
    private class StatsMethodVisitor extends MethodVisitor {
      public StatsMethodVisitor() {
        super(Opcodes.ASM4);
      }

      @Override
      public void visitMethodInsn(int opcode, String owner, String name, String desc) {
        super.visitMethodInsn(opcode, owner, name, desc);
        methodReferenceBuilder.add(new MethodReference(owner, name, desc));
      }

      @Override
      public void visitMultiANewArrayInsn(String desc, int dims) {
        // dx translates this instruction into a method invocation on
        // Array.newInstance(Class clazz, int...dims);
        methodReferenceBuilder.add(
            new MethodReference(MULTIARRAY_OWNER, MULTIARRAY_NAME, MULTIARRAY_DESC));
      }
    }
  }
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Locates and parses the VxWorks symbol table. Names the table "vxSymTbl"
// and names the table length variable "vxSymTblLen" (if the length variable
// appears either directly before or after the symbol table). Defines the
// symbol table as SYMBOL[vxSymTblLen].
//
// Extracts symbol name, location, and type from each entry. Disassembles,
// creates, and names functions. Names global variables.
//
// Any existing Ghidra symbol table entries that collide with VxWorks symbol
// table entries are deleted. Mangled C++ symbol names are demangled.
//
// The VxWorks symbol table is an array [0..n-1] of (struct SYMBOL) entries.
// The table may be immediately followed or preceded by an (int) vxSymTblLen
// value.
//
// Prerequisites:
//
// - Program memory block(s) is(are) aligned with actual load addresses
// (run something like MemAlignARM_LE.java)
//
// - Symbol table cannot be in a memory block with a name that contains
// the string "text" or "bss"
//
// - Modify getVxSymbolClass() to recognize your program's VxWorks
// symbol table entry structure, if necessary
//
// @category VxWorks
import java.util.List;
import ghidra.app.cmd.disassemble.DisassembleCommand;
import ghidra.app.cmd.label.DemanglerCmd;
import ghidra.app.plugin.core.analysis.AutoAnalysisManager;
import ghidra.app.script.GhidraScript;
import ghidra.app.services.DataTypeManagerService;
import ghidra.app.util.demangler.DemangledException;
import ghidra.app.util.demangler.gnu.GnuDemangler;
import ghidra.program.model.address.Address;
import ghidra.program.model.address.AddressSet;
import ghidra.program.model.data.*;
import ghidra.program.model.listing.Data;
import ghidra.program.model.listing.Instruction;
import ghidra.program.model.mem.MemoryBlock;
import ghidra.program.model.symbol.SourceType;
import ghidra.program.model.symbol.Symbol;
public class VxWorksSymTab_Finder extends GhidraScript {
boolean debug = false;
//------------------------------------------------------------------------
// getDataTypeManagerByName
//
// Looks up a data type manager registered with the tool's
// DataTypeManagerService by name.
//
// Returns:
//   Success: the matching DataTypeManager
//   Failure: null (no manager with that name)
//------------------------------------------------------------------------
private DataTypeManager getDataTypeManagerByName(String name) {
    DataTypeManagerService service = state.getTool().getService(DataTypeManagerService.class);
    // Scan every manager the service knows about for a name match
    for (DataTypeManager candidate : service.getDataTypeManagers()) {
        if (candidate.getName().equals(name)) {
            return candidate;
        }
    }
    return null;
}
//------------------------------------------------------------------------
// VxSymbol
//
// Wraps the SYMBOL structure data type that describes one VxWorks
// symbol table entry, caching the field offsets and entry length the
// parsing code needs.
//------------------------------------------------------------------------
private class VxSymbol {
    StructureDataType dt = null; // SYMBOL structure definition
    int nameOffset = 0;          // byte offset of symNameOff
    int locOffset = 0;           // byte offset of symLocOff
    int typeOffset = 0;          // byte offset of symType
    int length = 0;              // total entry size in bytes

    public VxSymbol(StructureDataType struct) {
        dt = struct;
        // Cache the offsets once so per-entry parsing is cheap
        nameOffset = getFieldOffset(dt, "symNameOff");
        locOffset = getFieldOffset(dt, "symLocOff");
        typeOffset = getFieldOffset(dt, "symType");
        length = dt.getLength();
    }

    // Returns the byte offset of the named field, or -1 when absent.
    // NOTE(review): callers never check for -1; every SYMBOL layout
    // built by getVxSymbolClass() is expected to define all three
    // required fields.
    private int getFieldOffset(StructureDataType dataType, String name) {
        for (DataTypeComponent component : dataType.getComponents()) {
            if (component.getFieldName().equals(name)) {
                return component.getOffset();
            }
        }
        return -1;
    }

    /** The SYMBOL structure data type. */
    public DataType dataType() {
        return dt;
    }

    /** Size of one symbol table entry, in bytes. */
    public int length() {
        return length;
    }

    /** Offset of the symbol-name pointer field. */
    public int nameOffset() {
        return nameOffset;
    }

    /** Offset of the symbol-location pointer field. */
    public int locOffset() {
        return locOffset;
    }

    /** Offset of the symbol-type byte. */
    public int typeOffset() {
        return typeOffset;
    }

    // Register SYMBOL with the program's DataTypeManager, replacing any
    // existing data type of the same name.
    public void createGhidraType() {
        currentProgram.getDataTypeManager().addDataType(
            dt, DataTypeConflictHandler.REPLACE_HANDLER);
    }
}
//------------------------------------------------------------------------
// getVxSymbolClass
//
// Creates a SYMBOL structure data type and uses it to create a new
// VxSymbol class instance.
//
// Each 'type' value selects one known VxWorks symbol table entry
// layout; callers iterate type = 0, 1, 2, ... until this method
// returns null. Add a new case below to support additional layouts.
//
// Returns:
//   Success: VxSymbol
//   Failure: null (type is past the last known layout)
//------------------------------------------------------------------------
private VxSymbol getVxSymbolClass(int type) {
    // Pre-define base data types used to define symbol table entry data type.
    // NOTE(review): assumes the "BuiltInTypes" manager is always present;
    // 'builtin' would be null otherwise -- confirm.
    DataTypeManager builtin = getDataTypeManagerByName("BuiltInTypes");
    DataType charType = builtin.getDataType("/char");
    // 4-byte pointers: these layouts target 32-bit VxWorks images
    DataType charPtrType = PointerDataType.getPointer(charType, 4);
    DataType byteType = builtin.getDataType("/byte");
    DataType ushortType = builtin.getDataType("/ushort");
    DataType intType = builtin.getDataType("/int");
    DataType uintType = builtin.getDataType("/uint");
    DataType voidType = builtin.getDataType("/void");
    DataType voidPtrType = PointerDataType.getPointer(voidType, 4);
    // Define a SYMBOL data type (try to put most common first).
    // Each SYMBOL data type must include at least 3 fields named
    // symNameOff, symLocOff, and symType.
    StructureDataType dt = null;
    switch (type) {
        case 0:
            // Version 5.4, 6.4 and 6.8
            //
            // Total length: 0x14 bytes
            // 0x00 uint symHashNode // NULL
            // 0x04 char *symNameOff
            // 0x08 void *symLocOff
            // 0x0c int NULL
            // 0x10 ushort symGroup
            // 0x12 uchar symType
            // 0x13 uchar undef
            dt = new StructureDataType("SYMBOL", 0x14);
            dt.replaceAtOffset(0, uintType, 4, "symHashNode", "");
            dt.replaceAtOffset(4, charPtrType, 4, "symNameOff", "");
            dt.replaceAtOffset(8, voidPtrType, 4, "symLocOff", "");
            dt.replaceAtOffset(0x0c, intType, 4, "", "");
            dt.replaceAtOffset(0x10, ushortType, 2, "symGroup", "");
            dt.replaceAtOffset(0x12, byteType, 1, "symType", "");
            break;
        case 1:
            // Version 6.1
            //
            // Total length: 0x18 bytes
            // 0x00 uint symHashNode // NULL
            // 0x04 char *symNameOff
            // 0x08 void *symLocOff
            // 0x0c int NULL
            // 0x10 int NULL
            // 0x14 uchar symType
            // 0x15 uchar undef[3]
            dt = new StructureDataType("SYMBOL", 0x18);
            dt.replaceAtOffset(0, uintType, 4, "symHashNode", "");
            dt.replaceAtOffset(4, charPtrType, 4, "symNameOff", "");
            dt.replaceAtOffset(8, voidPtrType, 4, "symLocOff", "");
            dt.replaceAtOffset(0x0c, intType, 4, "", "");
            dt.replaceAtOffset(0x10, intType, 4, "", "");
            dt.replaceAtOffset(0x14, byteType, 1, "symType", "");
            break;
        case 2:
            // Unknown VxWorks version(s)
            //
            // Total length: 0x1c bytes
            // 0x00 uint symHashNode // NULL
            // 0x04 char *symNameOff
            // 0x08 void *symLocOff
            // 0x0c int unk; // no clear pattern to values
            // 0x10 int NULL
            // 0x14 int NULL
            // 0x18 uchar symType
            // 0x19 uchar undef[3]
            dt = new StructureDataType("SYMBOL", 0x1c);
            dt.replaceAtOffset(0, uintType, 4, "symHashNode", "");
            dt.replaceAtOffset(4, charPtrType, 4, "symNameOff", "");
            dt.replaceAtOffset(8, voidPtrType, 4, "symLocOff", "");
            dt.replaceAtOffset(0x0c, intType, 4, "", "");
            dt.replaceAtOffset(0x10, intType, 4, "", "");
            dt.replaceAtOffset(0x14, intType, 4, "", "");
            dt.replaceAtOffset(0x18, byteType, 1, "symType", "");
            break;
        case 3:
            // Version 5.5
            //
            // Total length: 0x10 bytes
            // 0x00 uint symHashNode // NULL
            // 0x04 char *symNameOff
            // 0x08 void *symLocOff
            // 0x0c ushort symGroup // NULL
            // 0x0e uchar symType
            // 0x0f uchar undef
            dt = new StructureDataType("SYMBOL", 0x10);
            dt.replaceAtOffset(0, uintType, 4, "symHashNode", "");
            dt.replaceAtOffset(4, charPtrType, 4, "symNameOff", "");
            dt.replaceAtOffset(8, voidPtrType, 4, "symLocOff", "");
            dt.replaceAtOffset(0x0c, ushortType, 2, "symGroup", "");
            dt.replaceAtOffset(0x0e, byteType, 1, "symType", "");
            break;
        default:
            return null;
    }
    // Return a VxSymbol class for this SYMBOL data type
    return new VxSymbol(dt);
}
//------------------------------------------------------------------------
// isExecute
//
// Is address in an executable memory block? (false when the address is
// in no block at all)
//------------------------------------------------------------------------
private boolean isExecute(Address addr) {
    // Find the block containing the address, if any
    for (MemoryBlock block : getMemoryBlocks()) {
        if (!block.contains(addr)) {
            continue;
        }
        return block.isExecute();
    }
    return false;
}
//------------------------------------------------------------------------
// isAddress
//
// Is offset inside any existing memory block (start/end inclusive)?
//------------------------------------------------------------------------
private boolean isAddress(long offset) {
    // Test the offset against every program memory block
    for (MemoryBlock block : getMemoryBlocks()) {
        boolean inBlock =
            (block.getStart().getOffset() <= offset) &&
            (offset <= block.getEnd().getOffset());
        if (inBlock) {
            return true;
        }
    }
    return false; // no block covers the offset
}
//------------------------------------------------------------------------
// isAddress
//
// Is offset inside the specified memory block (start/end inclusive)?
//------------------------------------------------------------------------
private boolean isAddress(long offset, MemoryBlock block) {
    return (block.getStart().getOffset() <= offset) &&
        (offset <= block.getEnd().getOffset());
}
//------------------------------------------------------------------------
// isString
//
// Are the bytes starting at addr a C string?
//
// Walks forward one byte at a time: a null terminator means success;
// an unreadable byte, a non-printable byte, or a script cancel means
// failure.
//------------------------------------------------------------------------
private boolean isString(Address addr) {
    while (true) {
        byte ch;
        try {
            ch = getByte(addr);
        }
        catch (Exception except) {
            return false; // ran off readable memory
        }
        if (ch == 0x00) {
            return true; // reached the terminator: valid C string
        }
        // Accept tab, LF, CR, and 0x1a..0x7f.
        // May need to add valid character examples here.
        boolean printable =
            (ch == 0x09) || (ch == 0x0a) || (ch == 0x0d) ||
            (ch > 0x19 && ch < 0x80);
        if (!printable || monitor.isCancelled()) {
            return false;
        }
        addr = addr.add(1);
    }
}
//------------------------------------------------------------------------
// clearString
//
// Remove data or instructions that overlap the null-terminated
// string at addr (may happen if disassembly creates invalid references
// or compiler optimization creates shared strings).
//
// Use get*Containing() in case a string that ends with the string
// at addr has already been defined (e.g., the string at addr is
// "CoolFunc" and the string "g_pfCoolFunc" overlaps it).
//------------------------------------------------------------------------
private void clearString(Address addr) throws Exception {
    // Clear each byte of the string, breaking on the terminating null
    while (getByte(addr) != 0) {
        clearCodeUnitsAt(addr);
        addr = addr.add(1);
    }
    // Now clear at the string's terminating null character
    clearCodeUnitsAt(addr);
}

//------------------------------------------------------------------------
// clearCodeUnitsAt
//
// Removes any defined data item or instruction whose extent contains
// addr. (Helper extracted from clearString to avoid duplication.)
//------------------------------------------------------------------------
private void clearCodeUnitsAt(Address addr) throws Exception {
    Data data = getDataContaining(addr);
    if (data != null) {
        removeDataAt(data.getAddress());
    }
    Instruction inst = getInstructionContaining(addr);
    if (inst != null) {
        removeInstructionAt(inst.getAddress());
    }
}
//------------------------------------------------------------------------
// isSymTblEntry
//
// Does data pointed to by entry look like a VxWorks symbol table entry?
// Test is weak: it only checks the hash-node pointer, the name pointer
// (must reference a non-empty C string), and the symbol type code.
// Debug output numbers (1..5) identify which check rejected the entry.
//------------------------------------------------------------------------
private boolean isSymTblEntry(Address entry, VxSymbol vxSymbol) throws Exception {
    // First dword must be null or a valid ptr (typically into the sym table).
    // The 0xffffffffL mask treats the stored 32-bit value as unsigned.
    long value = getInt(entry) & 0xffffffffL;
    if ((value != 0) && !isAddress(value)) {
        if (debug) {
            println("1: " + entry + " --> " + Long.toHexString(value));
        }
        return false;
    }
    // symNameOff field must point to a non-null C string
    value = getInt(entry.add(vxSymbol.nameOffset())) & 0xffffffffL;
    if (!isAddress(value)) {
        if (debug) {
            println("2: " + entry + " --> " + Long.toHexString(value));
        }
        return false;
    }
    Address symNameAddr = toAddr(value);
    if (!isString(symNameAddr)) {
        if (debug) {
            println("3: " + entry + " --> " + Long.toHexString(value));
        }
        return false;
    }
    // Reject empty strings (isString accepts a bare terminator)
    if (getByte(symNameAddr) == 0) {
        return false;
    }
    // symLocOff field can be almost anything (e.g., external mem ref)
    //value = (long)getInt(entry.add(vxSymbol.locOffset())) & 0xffffffffL;
    //if (value == 0) {
    //    if (debug) println("4: " + entry);
    //    return false;
    //}
    // symType field must be recognized type code (this test is weak)
    byte symType = getByte(entry.add(vxSymbol.typeOffset()));
    if (!isValidSymType(symType)) {
        if (debug) {
            println("5: " + entry + " --> " + symType);
        }
        return false;
    }
    return true;
}
// Is symType one of the VxWorks symbol type codes this script accepts?
private boolean isValidSymType(byte symType) {
    switch (symType) {
        case 2:    // Local Absolute
        case 3:    // Global Absolute
        case 4:    // Local .text
        case 5:    // Global .text
        case 6:    // Local Data
        case 7:    // Global Data
        case 8:    // Local BSS
        case 9:    // Global BSS
        case 0x11: // External ref -- ignore
            return true;
        default:   // includes 0 (Undefined Symbol)
            return false;
    }
}
//------------------------------------------------------------------------
// findSymTbl
//
// Searches all memory blocks for data that looks like a run of testLen
// VxWorks symbol table entries. Executable blocks are skipped when the
// program also has non-executable blocks, since the table is expected
// to live in a data section.
//
// Returns:
//   Success: table address
//   Failure: null (not found, or script cancelled)
//------------------------------------------------------------------------
private Address findSymTbl(VxSymbol vxSymbol) throws Exception {
    int testLen = 100; // number of symbol tbl entries to look for
    boolean hasNonExecute = checkNonExecute();
    // Iterate through all memory blocks
    for (MemoryBlock block : currentProgram.getMemory().getBlocks()) {
        // Skip code/execute blocks if there are non-execute blocks,
        // otherwise search everything.
        if (hasNonExecute && block.isExecute()) {
            continue;
        }
        // skip uninit
        if (!block.isInitialized()) {
            continue;
        }
        // Search current block for run of testLen symbol table entries
        int testBlkSize = vxSymbol.length * testLen;
        printf(" block: " + block.getName() + " (" + block.getStart() + ", " +
            block.getEnd() + ") ");
        printf("testBlkSize = " + Integer.toHexString(testBlkSize) + " ");
        System.out.flush();
        long prevOffset = 0;
        // Slide a 4-byte-aligned cursor through the block, stopping when
        // fewer than testBlkSize bytes remain.
        Address cursor = block.getStart();
        while ((cursor != null) && isAddress(cursor.getOffset() + testBlkSize, block)) {
            // Script cancel check and visual feedback
            if (monitor.isCancelled()) {
                return null;
            }
            // Emit a progress dot every 1 MiB scanned
            if ((cursor.getOffset() - prevOffset) >= 0x100000) {
                printf(".");
                System.out.flush();
                prevOffset = cursor.getOffset();
            }
            // Determine whether cursor now points to a symbol table:
            // count consecutive plausible entries (i is read after the loop)
            int i = 0;
            for (Address entry = cursor; isSymTblEntry(entry, vxSymbol) &&
                (i < testLen); entry = entry.add(vxSymbol.length()), i++) {
            }
            if (i == testLen) {
                // May have symbol table -- verify length
                if (getSymTblLen(cursor, vxSymbol) != 0) {
                    printf("\n");
                    System.out.flush();
                    return cursor; // found table -- stop searching
                }
                if (debug) {
                    printf("Possible symbol table at " + cursor + " has length error\n");
                }
            }
            cursor = cursor.add(4);
        }
        printf("\n");
        printf(" search terminated at: " + cursor + "\n");
        System.out.flush();
    }
    return null;
}
// Returns true when the program contains at least one memory block
// that is not marked executable.
private boolean checkNonExecute() {
    for (MemoryBlock block : currentProgram.getMemory().getBlocks()) {
        if (block.isExecute()) {
            continue;
        }
        return true;
    }
    return false;
}
//------------------------------------------------------------------------
// getSymTblLen
//
// Counts consecutive entries starting at symTbl that parse as VxWorks
// symbol table entries.
//
// Returns:
//   Success: number of table entries (> 0)
//   Failure: 0
//
// NOTE: an earlier version additionally required the count to match an
// (int) length value stored directly before or after the table, but a
// VxWorks image was found whose length variable is not adjacent to the
// table, so that constraint was dropped. markSymbolTableLen() still
// labels the length variable when it is adjacent.
//------------------------------------------------------------------------
private int getSymTblLen(Address symTbl, VxSymbol vxSymbol) throws Exception {
    int count = 0;
    for (Address entry = symTbl; isSymTblEntry(entry, vxSymbol);
            entry = entry.add(vxSymbol.length())) {
        count++;
    }
    return count;
}
/**
 * Looks directly before and after the symbol table for an (int) value
 * equal to the table's entry count; when found, labels it "vxSymTblLen"
 * and defines it as a dword.
 *
 * <p>NOTE: some VxWorks images keep the length variable elsewhere, so a
 * miss here is only reported as a warning, not an error.
 *
 * @param symTbl address of the first symbol table entry
 * @param vxSymbol SYMBOL layout descriptor for this image
 * @param symTblLen number of entries in the table
 * @throws Exception if memory reads or label/data creation fail
 */
private void markSymbolTableLen(Address symTbl, VxSymbol vxSymbol, int symTblLen)
        throws Exception {
    Address symTblLenPtr = null;
    // Candidate locations: the 4 bytes before the table, and the first
    // 4 bytes after the last entry. Widen to long before multiplying to
    // avoid int overflow on very large tables.
    long foreOff = symTbl.getOffset() - 4;
    long aftOff = symTbl.getOffset() + (long) symTblLen * vxSymbol.length();
    if (isAddress(foreOff) && getInt(toAddr(foreOff)) == symTblLen) {
        symTblLenPtr = toAddr(foreOff);
    }
    else if (isAddress(aftOff) && getInt(toAddr(aftOff)) == symTblLen) {
        symTblLenPtr = toAddr(aftOff);
    }
    if (symTblLenPtr != null) {
        removeConflictingSymbols("vxSymTblLen", symTblLenPtr);
        createLabel(symTblLenPtr, "vxSymTblLen", true);
        createDWord(symTblLenPtr);
    }
    else {
        // Message typo fixed: "before of after" -> "before or after"
        println("Warning: Symbol Table Size not found before or after table");
    }
}
//------------------------------------------------------------------------
// removeConflictingSymbols
//
// Deletes every symbol named 'name' anywhere in the program, plus the
// primary symbol (if any) already at addr, so a fresh label can be
// created without collisions.
//------------------------------------------------------------------------
private void removeConflictingSymbols(String name, Address addr) {
    // Remove all same-named symbols program-wide
    for (Symbol existing : currentProgram.getSymbolTable().getSymbols(name)) {
        existing.delete();
    }
    // Remove the primary Ghidra symbol at the target address
    Symbol primary = getSymbolAt(addr);
    if (primary == null) {
        return;
    }
    primary.delete();
}
//------------------------------------------------------------------------
// applyDemangled
//
// Applies the demangled name at addr via DemanglerCmd, then removes
// the leftover mangled symbol from the global namespace. Does nothing
// when demangling produced no result.
//------------------------------------------------------------------------
private void applyDemangled(Address addr, String mangled, String demangled) {
    if (demangled == null) {
        return; // nothing was demangled for this symbol
    }
    new DemanglerCmd(addr, mangled).applyTo(currentProgram, monitor);
    List<Symbol> leftover = getSymbols(mangled, currentProgram.getGlobalNamespace());
    if (!leftover.isEmpty()) {
        currentProgram.getSymbolTable().removeSymbolSpecial(leftover.get(0));
    }
}
//------------------------------------------------------------------------
// doLocalDisassemble
//
// Disassembles at addr (following flow), while deferring auto-analysis
// of the new code until the script finishes. Doing the disassembly
// here is substantially faster and lets auto-analysis later operate on
// stable code/data.
//------------------------------------------------------------------------
private void doLocalDisassemble(Address addr) {
    // Never disassemble outside executable memory blocks
    if (!isExecute(addr)) {
        return;
    }
    DisassembleCommand cmd = new DisassembleCommand(addr, null, true);
    // Queue the new code for later analysis instead of analyzing now
    cmd.enableCodeAnalysis(false);
    cmd.applyTo(currentProgram, monitor);
    AddressSet disassembled = cmd.getDisassembledAddressSet();
    AutoAnalysisManager.getAnalysisManager(currentProgram).codeDefined(disassembled);
}
//------------------------------------------------------------------------
// getScriptAnalysisMode
//
// Tells Ghidra to hold auto-analysis until this script has completed
// (the script queues its disassembly for analysis afterwards).
//------------------------------------------------------------------------
@Override
public AnalysisMode getScriptAnalysisMode() {
    return AnalysisMode.SUSPENDED;
}
//========================================================================
// Main
//========================================================================
	@Override
	public void run() throws Exception {
		// Find the VxWorks symbol table: probe each known table-layout
		// variant (VxSymbol subclass) until one matches or the user cancels.
		Address symTbl = null;
		VxSymbol vxSymbol = getVxSymbolClass(0);
		for (int i = 0; ((vxSymbol != null) && !monitor.isCancelled()); i++, vxSymbol =
			getVxSymbolClass(i)) {
			println("Searching for symbol table variant " + i);
			if ((symTbl = findSymTbl(vxSymbol)) != null) {
				break;
			}
		}
		// No variant matched (or the search was cancelled) -- nothing to do.
		if (vxSymbol == null) {
			return;
		}
		int symTblLen = getSymTblLen(symTbl, vxSymbol);
		println("Symbol table at " + symTbl + " (" + symTblLen + " entries)");
		// Name the VxWorks symbol table (clearing any symbol that would
		// collide with the label first).
		removeConflictingSymbols("vxSymTbl", symTbl);
		createLabel(symTbl, "vxSymTbl", true);
		markSymbolTableLen(symTbl, vxSymbol, symTblLen);
		// Create symbol data type and symbol table structure: clear whatever
		// currently occupies the table's bytes, then lay an array of SYMBOL
		// entries over them.
		println("Creating SYMBOL data type and symbol table structure...");
		vxSymbol.createGhidraType();
		clearListing(symTbl, symTbl.add(symTblLen * vxSymbol.length() - 1));
		createData(symTbl, new ArrayDataType(vxSymbol.dataType(), symTblLen, vxSymbol.length()));
		// Create a GNU demangler instance (used for C++ symbol names)
		GnuDemangler demangler = new GnuDemangler();
		if (!demangler.canDemangle(currentProgram)) {
			println("Unable to create demangler.");
			return;
		}
		// Process VxWorks symbol table entries, one fixed-size entry at a time.
		println("Processing symbol table entries.");
		Address symEntry = symTbl;
		for (int i = 0; (i < symTblLen) && !monitor.isCancelled(); i++, symEntry =
			symEntry.add(vxSymbol.length())) {
			// Extract symbol table entry values. The 0xffffffffL mask keeps
			// the 32-bit pointer fields unsigned when widened to long.
			Address symNameAddr = toAddr(getInt(symEntry.add(vxSymbol.nameOffset())) & 0xffffffffL);
			Address symLoc = toAddr(getInt(symEntry.add(vxSymbol.locOffset())) & 0xffffffffL);
			byte symType = getByte(symEntry.add(vxSymbol.typeOffset()));
			// Remove any data or instructions that overlap string at *symNameAddr
			clearString(symNameAddr);
			// Turn *symNameAddr into a string and store it in symName
			String symName;
			try {
				symName = (String) createAsciiString(symNameAddr).getValue();
			}
			catch (Exception e) {
				// Abort the whole pass: a bad name pointer usually means the
				// table layout guess was wrong.
				println("createAsciiString: caught exception...");
				println(e.getMessage());
				println(e.toString());
				return;
			}
			// Cap pathologically long names (corrupt entries) at 2000 chars.
			if (symName.length() > 2000) {
				symName = symName.substring(0, 2000);
			}
			// Demangle symName; null means it was not a mangled C++ name.
			String symDemangledName = null;
			try {
				symDemangledName = demangler.demangle(symName).getSignature(false);
			}
			catch (DemangledException e) { // report demangling error
				if (!e.isInvalidMangledName()) {
					println("demangle: Demangling error");
				}
			}
			catch (RuntimeException e) { // ignore unmangled symNames
			}
			// Status update
			if (symDemangledName != null) {
				println("i=" + i + ", nameAddr: " + symNameAddr + ", loc: " + symLoc + ", type: " +
					symType + ", name: " + symName + ", demangled: " + symDemangledName);
			}
			else {
				println("i=" + i + ", nameAddr: " + symNameAddr + ", loc: " + symLoc + ", type: " +
					symType + ", name: " + symName);
			}
			// Clear any conflicting symbols from the Ghidra symbol table
			removeConflictingSymbols(symName, symLoc);
			// If entry type is data, simply create a Ghidra symbol for it.
			// If entry type is code, disassemble it and create function.
			// Type codes below follow the VxWorks SYM_TYPE conventions.
			switch (symType) {
				case 0: // Undefined Symbol
					println("NULL symType!");
					break;
				case 2: // Local Absolute
				case 3: // Global Absolute
				case 6: // Local Data
				case 7: // Global Data
				case 8: // Local BSS
				case 9: // Global BSS
					createLabel(symLoc, symName, true);
					applyDemangled(symLoc, symName, symDemangledName);
					break;
				case 4: // Local .text
				case 5: // Global .text
					doLocalDisassemble(symLoc);
					createFunction(symLoc, symName);
					if (getFunctionAt(symLoc) != null) {
						getFunctionAt(symLoc).setName(symName, SourceType.USER_DEFINED);
						applyDemangled(symLoc, symName, symDemangledName);
					}
					else {
						// Function creation failed (e.g. disassembly didn't
						// produce code); fall back to a plain label.
						println("createFunction: Failed to create function");
						createLabel(symLoc, symName, true);
						applyDemangled(symLoc, symName, symDemangledName);
					}
					break;
				case 0x11: // External ref -- ignore
					break;
				default:
					println("Invalid symType!");
					break;
			}
		}
	}
}
| |
package zoo.core;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import java.sql.Time;
/**
* Created by michael on 2017-04-13.
*/
/**
 * JPA entity mapped to the EMPLOYE table.
 *
 * <p>Column names mirror the (French) database schema: NAS is the social
 * insurance number, NOMJEUNEFILLE the maiden name, TAUX the pay rate.
 * NOTE(review): DATENAISSANCE is mapped as {@code java.sql.Time}; a birth
 * date would normally be a DATE column -- confirm against the schema.</p>
 */
@Entity
public class Employe {
    // Primary key.
    private long codeemploye;
    private String nas;
    private String nom;
    private String prenom;
    private String nomjeunefille;
    private Time datenaissance;
    private String adresse;
    private String sexe;
    private String telephone;
    private String fonction;
    private String service;
    private Long taux;
    private String grade;

    @Id
    @Column(name = "CODEEMPLOYE")
    public long getCodeemploye() { return codeemploye; }

    public void setCodeemploye(long codeemploye) { this.codeemploye = codeemploye; }

    @Basic
    @Column(name = "NAS")
    public String getNas() { return nas; }

    public void setNas(String nas) { this.nas = nas; }

    @Basic
    @Column(name = "NOM")
    public String getNom() { return nom; }

    public void setNom(String nom) { this.nom = nom; }

    @Basic
    @Column(name = "PRENOM")
    public String getPrenom() { return prenom; }

    public void setPrenom(String prenom) { this.prenom = prenom; }

    @Basic
    @Column(name = "NOMJEUNEFILLE")
    public String getNomjeunefille() { return nomjeunefille; }

    public void setNomjeunefille(String nomjeunefille) { this.nomjeunefille = nomjeunefille; }

    @Basic
    @Column(name = "DATENAISSANCE")
    public Time getDatenaissance() { return datenaissance; }

    public void setDatenaissance(Time datenaissance) { this.datenaissance = datenaissance; }

    @Basic
    @Column(name = "ADRESSE")
    public String getAdresse() { return adresse; }

    public void setAdresse(String adresse) { this.adresse = adresse; }

    @Basic
    @Column(name = "SEXE")
    public String getSexe() { return sexe; }

    public void setSexe(String sexe) { this.sexe = sexe; }

    @Basic
    @Column(name = "TELEPHONE")
    public String getTelephone() { return telephone; }

    public void setTelephone(String telephone) { this.telephone = telephone; }

    @Basic
    @Column(name = "FONCTION")
    public String getFonction() { return fonction; }

    public void setFonction(String fonction) { this.fonction = fonction; }

    @Basic
    @Column(name = "SERVICE")
    public String getService() { return service; }

    public void setService(String service) { this.service = service; }

    @Basic
    @Column(name = "TAUX")
    public Long getTaux() { return taux; }

    public void setTaux(Long taux) { this.taux = taux; }

    @Basic
    @Column(name = "GRADE")
    public String getGrade() { return grade; }

    public void setGrade(String grade) { this.grade = grade; }

    /** Equality over every persistent field (same contract as before). */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Employe other = (Employe) o;
        return codeemploye == other.codeemploye
                && same(nas, other.nas)
                && same(nom, other.nom)
                && same(prenom, other.prenom)
                && same(nomjeunefille, other.nomjeunefille)
                && same(datenaissance, other.datenaissance)
                && same(adresse, other.adresse)
                && same(sexe, other.sexe)
                && same(telephone, other.telephone)
                && same(fonction, other.fonction)
                && same(service, other.service)
                && same(taux, other.taux)
                && same(grade, other.grade);
    }

    /**
     * Hash over the same fields, in the same order, as {@link #equals}.
     * Long.hashCode(codeemploye) equals the previous explicit
     * {@code (int) (v ^ (v >>> 32))} seed, and the 31-multiplier fold below
     * reproduces the original per-field accumulation exactly.
     */
    @Override
    public int hashCode() {
        int result = Long.hashCode(codeemploye);
        Object[] fields = {nas, nom, prenom, nomjeunefille, datenaissance, adresse,
                sexe, telephone, fonction, service, taux, grade};
        for (Object field : fields) {
            result = 31 * result + (field == null ? 0 : field.hashCode());
        }
        return result;
    }

    // Null-safe equality helper for nullable persistent fields.
    private static boolean same(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.mapreduce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormat.TableSnapshotRegionSplit;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.junit.After;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import java.util.Arrays;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
import org.apache.hadoop.hbase.util.FSUtils;
@Category({VerySlowMapReduceTests.class, LargeTests.class})
public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBase {
private static final Log LOG = LogFactory.getLog(TestTableSnapshotInputFormat.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
private static final byte[] bbb = Bytes.toBytes("bbb");
private static final byte[] yyy = Bytes.toBytes("yyy");
@Rule
public TestName name = new TestName();
@Override
protected byte[] getStartRow() {
return bbb;
}
@Override
protected byte[] getEndRow() {
return yyy;
}
@After
public void tearDown() throws Exception {
}
@Test
public void testGetBestLocations() throws IOException {
TableSnapshotInputFormatImpl tsif = new TableSnapshotInputFormatImpl();
Configuration conf = UTIL.getConfiguration();
HDFSBlocksDistribution blockDistribution = new HDFSBlocksDistribution();
Assert.assertEquals(Lists.newArrayList(),
TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 1);
Assert.assertEquals(Lists.newArrayList("h1"),
TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 1);
Assert.assertEquals(Lists.newArrayList("h1"),
TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 1);
Assert.assertEquals(Lists.newArrayList("h1"),
TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
blockDistribution = new HDFSBlocksDistribution();
blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 10);
blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 7);
blockDistribution.addHostsAndBlockWeight(new String[] {"h3"}, 5);
blockDistribution.addHostsAndBlockWeight(new String[] {"h4"}, 1);
Assert.assertEquals(Lists.newArrayList("h1"),
TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 2);
Assert.assertEquals(Lists.newArrayList("h1", "h2"),
TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 3);
Assert.assertEquals(Lists.newArrayList("h2", "h1"),
TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
blockDistribution.addHostsAndBlockWeight(new String[] {"h3"}, 6);
blockDistribution.addHostsAndBlockWeight(new String[] {"h4"}, 9);
Assert.assertEquals(Lists.newArrayList("h2", "h3", "h4", "h1"),
TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
}
public static enum TestTableSnapshotCounters {
VALIDATION_ERROR
}
public static class TestTableSnapshotMapper
extends TableMapper<ImmutableBytesWritable, NullWritable> {
@Override
protected void map(ImmutableBytesWritable key, Result value,
Context context) throws IOException, InterruptedException {
// Validate a single row coming from the snapshot, and emit the row key
verifyRowFromMap(key, value);
context.write(key, NullWritable.get());
}
}
public static class TestTableSnapshotReducer
extends Reducer<ImmutableBytesWritable, NullWritable, NullWritable, NullWritable> {
HBaseTestingUtility.SeenRowTracker rowTracker =
new HBaseTestingUtility.SeenRowTracker(bbb, yyy);
@Override
protected void reduce(ImmutableBytesWritable key, Iterable<NullWritable> values,
Context context) throws IOException, InterruptedException {
rowTracker.addRow(key.get());
}
@Override
protected void cleanup(Context context) throws IOException,
InterruptedException {
rowTracker.validate();
}
}
@Test
public void testInitTableSnapshotMapperJobConfig() throws Exception {
setupCluster();
final TableName tableName = TableName.valueOf(name.getMethodName());
String snapshotName = "foo";
try {
createTableAndSnapshot(UTIL, tableName, snapshotName, getStartRow(), getEndRow(), 1);
Job job = new Job(UTIL.getConfiguration());
Path tmpTableDir = UTIL.getDataTestDirOnTestFS(snapshotName);
TableMapReduceUtil.initTableSnapshotMapperJob(snapshotName,
new Scan(), TestTableSnapshotMapper.class, ImmutableBytesWritable.class,
NullWritable.class, job, false, tmpTableDir);
// TODO: would be better to examine directly the cache instance that results from this
// config. Currently this is not possible because BlockCache initialization is static.
Assert.assertEquals(
"Snapshot job should be configured for default LruBlockCache.",
HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT,
job.getConfiguration().getFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, -1), 0.01);
Assert.assertEquals(
"Snapshot job should not use BucketCache.",
0, job.getConfiguration().getFloat("hbase.bucketcache.size", -1), 0.01);
} finally {
UTIL.getAdmin().deleteSnapshot(snapshotName);
UTIL.deleteTable(tableName);
tearDownCluster();
}
}
@Override
public void testRestoreSnapshotDoesNotCreateBackRefLinksInit(TableName tableName,
String snapshotName, Path tmpTableDir) throws Exception {
Job job = new Job(UTIL.getConfiguration());
TableMapReduceUtil.initTableSnapshotMapperJob(snapshotName,
new Scan(), TestTableSnapshotMapper.class, ImmutableBytesWritable.class,
NullWritable.class, job, false, tmpTableDir);
}
@Override
public void testWithMockedMapReduce(HBaseTestingUtility util, String snapshotName,
int numRegions, int expectedNumSplits) throws Exception {
setupCluster();
final TableName tableName = TableName.valueOf(name.getMethodName());
try {
createTableAndSnapshot(
util, tableName, snapshotName, getStartRow(), getEndRow(), numRegions);
Job job = new Job(util.getConfiguration());
Path tmpTableDir = util.getDataTestDirOnTestFS(snapshotName);
Scan scan = new Scan(getStartRow(), getEndRow()); // limit the scan
TableMapReduceUtil.initTableSnapshotMapperJob(snapshotName,
scan, TestTableSnapshotMapper.class, ImmutableBytesWritable.class,
NullWritable.class, job, false, tmpTableDir);
verifyWithMockedMapReduce(job, numRegions, expectedNumSplits, getStartRow(), getEndRow());
} finally {
util.getAdmin().deleteSnapshot(snapshotName);
util.deleteTable(tableName);
tearDownCluster();
}
}
public static void blockUntilSplitFinished(HBaseTestingUtility util, TableName tableName,
int expectedRegionSize) throws Exception {
for (int i = 0; i < 100; i++) {
List<HRegionInfo> hRegionInfoList = util.getAdmin().getTableRegions(tableName);
if (hRegionInfoList.size() >= expectedRegionSize) {
break;
}
Thread.sleep(1000);
}
}
@Test
public void testNoDuplicateResultsWhenSplitting() throws Exception {
setupCluster();
TableName tableName = TableName.valueOf("testNoDuplicateResultsWhenSplitting");
String snapshotName = "testSnapshotBug";
try {
if (UTIL.getAdmin().tableExists(tableName)) {
UTIL.deleteTable(tableName);
}
UTIL.createTable(tableName, FAMILIES);
Admin admin = UTIL.getAdmin();
// put some stuff in the table
Table table = UTIL.getConnection().getTable(tableName);
UTIL.loadTable(table, FAMILIES);
// split to 2 regions
admin.split(tableName, Bytes.toBytes("eee"));
blockUntilSplitFinished(UTIL, tableName, 2);
Path rootDir = FSUtils.getRootDir(UTIL.getConfiguration());
FileSystem fs = rootDir.getFileSystem(UTIL.getConfiguration());
SnapshotTestingUtils.createSnapshotAndValidate(admin, tableName, Arrays.asList(FAMILIES),
null, snapshotName, rootDir, fs, true);
// load different values
byte[] value = Bytes.toBytes("after_snapshot_value");
UTIL.loadTable(table, FAMILIES, value);
// cause flush to create new files in the region
admin.flush(tableName);
table.close();
Job job = new Job(UTIL.getConfiguration());
Path tmpTableDir = UTIL.getDataTestDirOnTestFS(snapshotName);
// limit the scan
Scan scan = new Scan().withStartRow(getStartRow()).withStopRow(getEndRow());
TableMapReduceUtil.initTableSnapshotMapperJob(snapshotName, scan,
TestTableSnapshotMapper.class, ImmutableBytesWritable.class, NullWritable.class, job, false,
tmpTableDir);
verifyWithMockedMapReduce(job, 2, 2, getStartRow(), getEndRow());
} finally {
UTIL.getAdmin().deleteSnapshot(snapshotName);
UTIL.deleteTable(tableName);
tearDownCluster();
}
}
private void verifyWithMockedMapReduce(Job job, int numRegions, int expectedNumSplits,
byte[] startRow, byte[] stopRow)
throws IOException, InterruptedException {
TableSnapshotInputFormat tsif = new TableSnapshotInputFormat();
List<InputSplit> splits = tsif.getSplits(job);
Assert.assertEquals(expectedNumSplits, splits.size());
HBaseTestingUtility.SeenRowTracker rowTracker =
new HBaseTestingUtility.SeenRowTracker(startRow, stopRow);
for (int i = 0; i < splits.size(); i++) {
// validate input split
InputSplit split = splits.get(i);
Assert.assertTrue(split instanceof TableSnapshotRegionSplit);
// validate record reader
TaskAttemptContext taskAttemptContext = mock(TaskAttemptContext.class);
when(taskAttemptContext.getConfiguration()).thenReturn(job.getConfiguration());
RecordReader<ImmutableBytesWritable, Result> rr =
tsif.createRecordReader(split, taskAttemptContext);
rr.initialize(split, taskAttemptContext);
// validate we can read all the data back
while (rr.nextKeyValue()) {
byte[] row = rr.getCurrentKey().get();
verifyRowFromMap(rr.getCurrentKey(), rr.getCurrentValue());
rowTracker.addRow(row);
}
rr.close();
}
// validate all rows are seen
rowTracker.validate();
}
@Override
protected void testWithMapReduceImpl(HBaseTestingUtility util, TableName tableName,
String snapshotName, Path tableDir, int numRegions, int expectedNumSplits,
boolean shutdownCluster) throws Exception {
doTestWithMapReduce(util, tableName, snapshotName, getStartRow(), getEndRow(), tableDir,
numRegions, expectedNumSplits, shutdownCluster);
}
// this is also called by the IntegrationTestTableSnapshotInputFormat
public static void doTestWithMapReduce(HBaseTestingUtility util, TableName tableName,
String snapshotName, byte[] startRow, byte[] endRow, Path tableDir, int numRegions,
int expectedNumSplits, boolean shutdownCluster) throws Exception {
LOG.info("testing with MapReduce");
LOG.info("create the table and snapshot");
createTableAndSnapshot(util, tableName, snapshotName, startRow, endRow, numRegions);
if (shutdownCluster) {
LOG.info("shutting down hbase cluster.");
util.shutdownMiniHBaseCluster();
}
try {
// create the job
Job job = new Job(util.getConfiguration());
Scan scan = new Scan(startRow, endRow); // limit the scan
job.setJarByClass(util.getClass());
TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
TestTableSnapshotInputFormat.class);
TableMapReduceUtil.initTableSnapshotMapperJob(snapshotName,
scan, TestTableSnapshotMapper.class, ImmutableBytesWritable.class,
NullWritable.class, job, true, tableDir);
job.setReducerClass(TestTableSnapshotInputFormat.TestTableSnapshotReducer.class);
job.setNumReduceTasks(1);
job.setOutputFormatClass(NullOutputFormat.class);
Assert.assertTrue(job.waitForCompletion(true));
} finally {
if (!shutdownCluster) {
util.getAdmin().deleteSnapshot(snapshotName);
util.deleteTable(tableName);
}
}
}
}
| |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.command;
import com.facebook.buck.artifact_cache.ArtifactCacheFactory;
import com.facebook.buck.command.config.BuildBuckConfig;
import com.facebook.buck.core.build.engine.cache.manager.BuildInfoStoreManager;
import com.facebook.buck.core.build.engine.config.CachingBuildEngineBuckConfig;
import com.facebook.buck.core.build.engine.delegate.CachingBuildEngineDelegate;
import com.facebook.buck.core.build.engine.impl.CachingBuildEngine;
import com.facebook.buck.core.build.engine.type.BuildType;
import com.facebook.buck.core.build.execution.context.ExecutionContext;
import com.facebook.buck.core.cell.Cell;
import com.facebook.buck.core.config.BuckConfig;
import com.facebook.buck.core.exceptions.BuildTargetParseException;
import com.facebook.buck.core.exceptions.HumanReadableException;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.TargetConfigurationSerializer;
import com.facebook.buck.core.model.actiongraph.ActionGraphAndBuilder;
import com.facebook.buck.core.rulekey.RuleKey;
import com.facebook.buck.core.util.immutables.BuckStyleImmutable;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.io.filesystem.ProjectFilesystemFactory;
import com.facebook.buck.jvm.java.JavaBuckConfig;
import com.facebook.buck.log.thrift.ThriftRuleKeyLogger;
import com.facebook.buck.manifestservice.ManifestService;
import com.facebook.buck.remoteexecution.config.RemoteExecutionConfig;
import com.facebook.buck.remoteexecution.interfaces.MetadataProvider;
import com.facebook.buck.rules.keys.RuleKeyCacheScope;
import com.facebook.buck.rules.keys.RuleKeyFactories;
import com.facebook.buck.rules.keys.config.RuleKeyConfiguration;
import com.facebook.buck.rules.modern.builders.ModernBuildRuleBuilderFactory;
import com.facebook.buck.rules.modern.config.ModernBuildRuleConfig;
import com.facebook.buck.util.Console;
import com.facebook.buck.util.ExitCode;
import com.facebook.buck.util.concurrent.ExecutorPool;
import com.facebook.buck.util.concurrent.WeightedListeningExecutorService;
import com.facebook.buck.util.environment.Platform;
import com.facebook.buck.util.timing.Clock;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.ListeningExecutorService;
import java.nio.file.Path;
import java.util.Optional;
import org.immutables.value.Value;
/**
 * Used to build a given set of targets. Wraps a {@link CachingBuildEngine} and a
 * {@link Build}, both created eagerly in the constructor; call {@link #shutdown()}
 * exactly once when all builds have finished to release them.
 */
public class BuildExecutor {
  private final ActionGraphAndBuilder actionGraphAndBuilder;
  private final WeightedListeningExecutorService executorService;
  private final CachingBuildEngineDelegate cachingBuildEngineDelegate;
  private final BuildExecutorArgs args;
  private final RuleKeyCacheScope<RuleKey> ruleKeyCacheScope;
  private final Optional<BuildType> buildEngineMode;
  private final Optional<ThriftRuleKeyLogger> ruleKeyLogger;
  private final MetadataProvider metadataProvider;
  private final TargetConfigurationSerializer targetConfigurationSerializer;

  private final CachingBuildEngine cachingBuildEngine;
  private final Build build;

  // Guards against buildTargets() after shutdown(); volatile because
  // buildTargets() and shutdown() may run on different threads.
  private volatile boolean isShutdown = false;

  public BuildExecutor(
      BuildExecutorArgs args,
      ExecutionContext executionContext,
      ActionGraphAndBuilder actionGraphAndBuilder,
      CachingBuildEngineDelegate cachingBuildEngineDelegate,
      WeightedListeningExecutorService executorService,
      boolean keepGoing,
      boolean useDistributedBuildCache,
      RuleKeyCacheScope<RuleKey> ruleKeyRuleKeyCacheScope,
      Optional<BuildType> buildEngineMode,
      Optional<ThriftRuleKeyLogger> ruleKeyLogger,
      MetadataProvider metadataProvider,
      TargetConfigurationSerializer targetConfigurationSerializer,
      boolean remoteExecutionAutoEnabled,
      boolean forceDisableRemoteExecution) {
    this.actionGraphAndBuilder = actionGraphAndBuilder;
    this.executorService = executorService;
    this.args = args;
    this.cachingBuildEngineDelegate = cachingBuildEngineDelegate;
    this.buildEngineMode = buildEngineMode;
    this.ruleKeyLogger = ruleKeyLogger;
    this.ruleKeyCacheScope = ruleKeyRuleKeyCacheScope;
    this.metadataProvider = metadataProvider;
    this.targetConfigurationSerializer = targetConfigurationSerializer;

    // Init resources. The engine must exist before the Build that uses it;
    // both are torn down in shutdown().
    this.cachingBuildEngine =
        createCachingBuildEngine(remoteExecutionAutoEnabled, forceDisableRemoteExecution);
    this.build =
        new Build(
            actionGraphAndBuilder.getActionGraphBuilder(),
            args.getRootCell(),
            cachingBuildEngine,
            args.getArtifactCacheFactory().newInstance(useDistributedBuildCache),
            args.getBuckConfig().getView(JavaBuckConfig.class).createDefaultJavaPackageFinder(),
            args.getClock(),
            executionContext,
            keepGoing);
  }

  /**
   * Builds the given targets synchronously. Failures are printed to the EventBus.
   *
   * @param targetsToBuild targets to build in this invocation
   * @param pathToBuildReport optional location to write the build report to
   * @return exit code.
   * @throws IllegalArgumentException if called after {@link #shutdown()}
   */
  public ExitCode buildTargets(
      Iterable<BuildTarget> targetsToBuild, Optional<Path> pathToBuildReport) throws Exception {
    Preconditions.checkArgument(!isShutdown);
    try {
      return build.executeAndPrintFailuresToEventBus(
          targetsToBuild, args.getBuckEventBus(), args.getConsole(), pathToBuildReport);
    } catch (BuildTargetParseException e) {
      // Re-wrap parse failures with a hint, preserving the original message.
      throw new HumanReadableException(
          e.getMessage()
              + "\n"
              + "Please check whether one of the targets passed as parameter has an empty or invalid name.");
    }
  }

  public CachingBuildEngine getCachingBuildEngine() {
    return cachingBuildEngine;
  }

  /**
   * Destroy any resources associated with this builder. Call this once only, when all
   * buildLocallyAndReturnExitCode calls have finished. Idempotent: repeated calls
   * after the first are no-ops.
   */
  public synchronized void shutdown() {
    if (isShutdown) {
      return;
    }

    isShutdown = true;

    // Destroy resources.
    build.close();
    cachingBuildEngine.close();
  }

  // Assembles the CachingBuildEngine from the buck config views plus the
  // constructor-supplied remote-execution flags and rule-key machinery.
  private CachingBuildEngine createCachingBuildEngine(
      boolean remoteExecutionAutoEnabled, boolean forceDisableRemoteExecution) {
    CachingBuildEngineBuckConfig engineConfig =
        args.getBuckConfig().getView(CachingBuildEngineBuckConfig.class);
    return new CachingBuildEngine(
        cachingBuildEngineDelegate,
        ModernBuildRuleBuilderFactory.getBuildStrategy(
            args.getBuckConfig().getView(ModernBuildRuleConfig.class),
            args.getBuckConfig().getView(RemoteExecutionConfig.class),
            actionGraphAndBuilder.getActionGraphBuilder(),
            args.getRootCell(),
            args.getRootCell().getCellPathResolver(),
            cachingBuildEngineDelegate.getFileHashCache(),
            args.getBuckEventBus(),
            metadataProvider,
            remoteExecutionAutoEnabled,
            forceDisableRemoteExecution),
        executorService,
        // Explicit engine mode (if given) wins over the config default.
        buildEngineMode.orElse(engineConfig.getBuildEngineMode()),
        engineConfig.getBuildDepFiles(),
        engineConfig.getBuildMaxDepFileCacheEntries(),
        engineConfig.getBuildArtifactCacheSizeLimit(),
        actionGraphAndBuilder.getActionGraphBuilder(),
        actionGraphAndBuilder.getBuildEngineActionToBuildRuleResolver(),
        targetConfigurationSerializer,
        args.getBuildInfoStoreManager(),
        engineConfig.getResourceAwareSchedulingInfo(),
        engineConfig.getConsoleLogBuildRuleFailuresInline(),
        RuleKeyFactories.of(
            args.getRuleKeyConfiguration(),
            cachingBuildEngineDelegate.getFileHashCache(),
            actionGraphAndBuilder.getActionGraphBuilder(),
            args.getBuckConfig().getView(BuildBuckConfig.class).getBuildInputRuleKeyFileSizeLimit(),
            ruleKeyCacheScope.getCache(),
            ruleKeyLogger),
        args.getManifestService());
  }

  public Build getBuild() {
    return build;
  }
}
/**
 * Common arguments for running a build. The Immutables processor generates the
 * concrete {@code BuildExecutorArgs} value class and builder from this spec.
 * NOTE(review): accessor declaration order determines the generated builder's
 * parameter order -- do not reorder these methods casually.
 */
@Value.Immutable
@BuckStyleImmutable
abstract class AbstractBuildExecutorArgs {
  public abstract Console getConsole();

  public abstract BuckEventBus getBuckEventBus();

  public abstract Platform getPlatform();

  public abstract Clock getClock();

  public abstract Cell getRootCell();

  public abstract ImmutableMap<ExecutorPool, ListeningExecutorService> getExecutors();

  public abstract ProjectFilesystemFactory getProjectFilesystemFactory();

  public abstract BuildInfoStoreManager getBuildInfoStoreManager();

  public abstract ArtifactCacheFactory getArtifactCacheFactory();

  public abstract RuleKeyConfiguration getRuleKeyConfiguration();

  public abstract Optional<ManifestService> getManifestService();

  // Convenience accessor: the config always comes from the root cell.
  public BuckConfig getBuckConfig() {
    return getRootCell().getBuckConfig();
  }
}
| |
// -*- mode: java; c-basic-offset: 2; -*-
// Copyright 2016-2017 MIT, All rights reserved
// Released under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
package com.google.appinventor.client.editor.simple.components;
import static com.google.appinventor.client.Ode.MESSAGES;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.google.appinventor.client.ErrorReporter;
import com.google.appinventor.client.Ode;
import com.google.appinventor.client.editor.simple.SimpleEditor;
import com.google.appinventor.client.editor.simple.palette.SimplePaletteItem;
import com.google.appinventor.client.widgets.dnd.DragSource;
import com.google.appinventor.components.common.ComponentConstants;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.event.logical.shared.AttachEvent;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.ui.AbsolutePanel;
import com.google.gwt.user.client.ui.Image;
/**
 * Designer-side mock of the App Inventor Map component. Renders a live
 * Leaflet map (via JSNI) inside the form editor and keeps it in sync with
 * the component's designer properties.
 */
public final class MockMap extends MockContainer {
  public static final String TYPE = "Map";

  // Names of the designer properties dispatched in onPropertyChange().
  protected static final String PROPERTY_NAME_LATITUDE = "Latitude";
  protected static final String PROPERTY_NAME_LONGITUDE = "Longitude";
  protected static final String PROPERTY_NAME_MAP_TYPE = "MapType";
  protected static final String PROPERTY_NAME_CENTER_FROM_STRING = "CenterFromString";
  protected static final String PROPERTY_NAME_ZOOM_LEVEL = "ZoomLevel";
  protected static final String PROPERTY_NAME_SHOW_COMPASS = "ShowCompass";
  protected static final String PROPERTY_NAME_SHOW_ZOOM = "ShowZoom";
  protected static final String PROPERTY_NAME_SHOW_USER = "ShowUser";
  protected static final String PROPERTY_NAME_ENABLE_ROTATION = "EnableRotation";
  protected static final String PROPERTY_NAME_SHOW_SCALE = "ShowScale";
  protected static final String PROPERTY_NAME_SCALE_UNITS = "ScaleUnits";

  /**
   * The Widget wrapping the element where the map tiles will be rendered.
   */
  protected final AbsolutePanel mapWidget;

  /**
   * The JavaScript object representing the non-GWT maps renderer.
   * Null until {@link #initPanel()} runs on attach.
   */
  private JavaScriptObject mapInstance;

  /**
   * A JavaScript array containing the (1-indexed) tile layers used for maps.
   */
  private JavaScriptObject tileLayers;

  /**
   * Active base tile layer.
   */
  private JavaScriptObject baseLayer;

  /**
   * Set of event listeners that will be triggered on native map events.
   */
  private final Set<MockMapEventListener> listeners = new HashSet<MockMapEventListener>();

  // Settings for the internal maps component. The default center
  // (42.359144, -71.093612) is presumably the MIT campus.
  private double latitude = 42.359144;
  private double longitude = -71.093612;
  private int zoomLevel = 13;
  private int selectedTileLayer = 1;  // index into tileLayers; 1 = OpenStreetMap (see initPanel)
  private boolean zoomControl = false;
  private boolean compassEnabled = false;
  private boolean userLocationEnabled = false;
  private boolean showScale = false;
  private int scaleUnits = 1;  // 1 = metric, 2 = imperial (see initPanel/updateScaleUnits)

  /**
   * Creates a new MockMap bound to the given editor. Actual Leaflet
   * initialization is deferred until the widget is attached to the DOM.
   *
   * @param editor the editor hosting this mock component
   */
  public MockMap(SimpleEditor editor) {
    super(editor, TYPE, images.map(), new MockMapLayout());
    initToolbarItems();  // installs the singleton Leaflet toolbar actions
    rootPanel.setHeight("100%");
    mapWidget = new AbsolutePanel();
    mapWidget.setStylePrimaryName("ode-SimpleMockContainer");
    mapWidget.add(rootPanel);
    initComponent(mapWidget);
    // The Leaflet map needs a live DOM element, so the map is created (or
    // invalidated) on attach rather than in the constructor, and any child
    // map features are (re-)added to it.
    mapWidget.addAttachHandler(new AttachEvent.Handler() {
      @Override
      public void onAttachOrDetach(AttachEvent arg0) {
        if (arg0.isAttached()) {
          initPanel();
          invalidateMap();
          for (MockComponent child : children) {
            ((MockMapFeature) child).addToMap(MockMap.this);
          }
        }
      }
    });
  }

  @Override
  public void collectTypesAndIcons(Map<String, String> typesAndIcons) {
    super.collectTypesAndIcons(typesAndIcons);
    // These types can be loaded dynamically using LoadFromURL, so we want to show
    // generic options even though the user might not have explicitly created one
    typesAndIcons.put("Marker", new Image(images.marker()).getElement().getString());
    typesAndIcons.put("LineString", new Image(images.linestring()).getElement().getString());
    typesAndIcons.put("Polygon", new Image(images.polygon()).getElement().getString());
  }

  /** Registers a listener for native map events. */
  public void addEventListener(MockMapEventListener listener) {
    listeners.add(listener);
  }

  /** Unregisters a previously added map event listener. */
  public void removeEventListener(MockMapEventListener listener) {
    listeners.remove(listener);
  }

  @Override
  public int getPreferredWidth() {
    // NOTE(review): reuses the VideoPlayer preferred-size constant; confirm a
    // Map-specific constant is not available in ComponentConstants.
    return ComponentConstants.VIDEOPLAYER_PREFERRED_WIDTH;
  }

  @Override
  public int getPreferredHeight() {
    // NOTE(review): reuses the VideoPlayer preferred-size constant; see
    // getPreferredWidth().
    return ComponentConstants.VIDEOPLAYER_PREFERRED_HEIGHT;
  }

  @Override
  public void onBrowserEvent(Event event) {
    if (isUnlocked()) {
      // Map movement is unlocked: mark the event as not-to-be-cancelled so it
      // can reach the native Leaflet handlers (presumably for pan/zoom).
      setShouldCancel(event, false);
    } else {
      super.onBrowserEvent(event);
    }
  }

  /**
   * Only map features may be dropped on a map, whether dragged from the
   * palette or moved from elsewhere on the form.
   */
  @Override
  protected boolean acceptableSource(DragSource source) {
    MockComponent component = null;
    if (source instanceof MockComponent) {
      component = (MockComponent) source;
    } else if (source instanceof SimplePaletteItem) {
      component = (MockComponent) source.getDragWidget();
    }
    return component instanceof MockMapFeature;
  }

  private void setBackgroundColorProperty(String text) {
    if (MockComponentsUtil.isDefaultColor(text)) {
      text = "&HFFFFFFFF";  // default background maps to opaque white
    }
    MockComponentsUtil.setWidgetBackgroundColor(mapWidget, text);
  }

  private void setEnabledProperty(String text) {
    MockComponentsUtil.setEnabled(this, text);
  }

  /**
   * Routes designer property changes to the corresponding map updater.
   * Width/height changes only require a size invalidation of the map.
   */
  @Override
  public void onPropertyChange(String propertyName, String newValue) {
    super.onPropertyChange(propertyName, newValue);
    if (propertyName.equals(PROPERTY_NAME_ENABLED)) {
      setEnabledProperty(newValue);
    } else if (propertyName.equals(PROPERTY_NAME_BACKGROUNDCOLOR)) {
      setBackgroundColorProperty(newValue);
    } else if (propertyName.equals(PROPERTY_NAME_LATITUDE)) {
      setLatitude(newValue);
    } else if (propertyName.equals(PROPERTY_NAME_LONGITUDE)) {
      setLongitude(newValue);
    } else if (propertyName.equals(PROPERTY_NAME_WIDTH)) {
      invalidateMap();
    } else if (propertyName.equals(PROPERTY_NAME_HEIGHT)) {
      invalidateMap();
    } else if (propertyName.equals(PROPERTY_NAME_MAP_TYPE)) {
      setMapType(newValue);
    } else if (propertyName.equals(PROPERTY_NAME_CENTER_FROM_STRING)) {
      setCenter(newValue);
    } else if (propertyName.equals(PROPERTY_NAME_ZOOM_LEVEL)) {
      setZoomLevel(newValue);
    } else if (propertyName.equals(PROPERTY_NAME_SHOW_COMPASS)) {
      setShowCompass(newValue);
    } else if (propertyName.equals(PROPERTY_NAME_SHOW_USER)) {
      setShowUser(newValue);
    } else if (propertyName.equals(PROPERTY_NAME_SHOW_ZOOM)) {
      setShowZoom(newValue);
    } else if (propertyName.equals(PROPERTY_NAME_SHOW_SCALE)) {
      setShowScale(newValue);
    } else if (propertyName.equals(PROPERTY_NAME_SCALE_UNITS)) {
      setScaleUnits(newValue);
    }
  }

  /**
   * @return the underlying Leaflet map object, or null if the map has not
   *     been initialized yet
   */
  public final JavaScriptObject getMapInstance() {
    return mapInstance;
  }

  // NOTE(review): unlike setMapType/setZoomLevel, the next two setters do not
  // catch NumberFormatException; a malformed value propagates to the caller.
  private void setLatitude(String text) {
    latitude = Double.parseDouble(text);
    updateMapLatitude(latitude);
  }

  private void setLongitude(String text) {
    longitude = Double.parseDouble(text);
    updateMapLongitude(longitude);
  }

  /**
   * Switches the base tile layer. On a malformed id, reports the error and
   * reverts the property to the currently selected layer.
   */
  private void setMapType(String tileLayerId) {
    try {
      selectedTileLayer = Integer.parseInt(tileLayerId);
      updateMapType(selectedTileLayer);
    } catch(NumberFormatException e) {
      ErrorReporter.reportError(MESSAGES.unknownMapTypeException(tileLayerId));
      changeProperty(PROPERTY_NAME_MAP_TYPE, Integer.toString(selectedTileLayer));
    }
  }

  /**
   * Parses a "lat, lng" string and recenters the map. A wrong number of
   * comma-separated parts reports an error and reverts the property.
   * NOTE(review): unparsable numeric parts are not caught here (unlike
   * setMapType), and latitude is assigned before the longitude part is
   * parsed, so a failure leaves a partial update -- confirm this is intended.
   */
  private void setCenter(String center) {
    String[] parts = center.split(",");
    if (parts.length != 2) {
      ErrorReporter.reportError(MESSAGES.mapCenterWrongNumberArgumentsException(parts.length));
      changeProperty(PROPERTY_NAME_CENTER_FROM_STRING, latitude + ", " + longitude);
    } else {
      latitude = Double.parseDouble(parts[0].trim());
      longitude = Double.parseDouble(parts[1].trim());
      updateMapCenter(latitude, longitude);
    }
  }

  /**
   * Sets the zoom level. Only values in [1, 18] are accepted; out-of-range
   * values report an error and revert the property to the previous zoom.
   */
  private void setZoomLevel(String zoom) {
    int zoomLevel = Integer.parseInt(zoom);
    if (zoomLevel < 1 || zoomLevel > 18) {
      ErrorReporter.reportError(MESSAGES.mapZoomLevelOutOfBoundsException());
      changeProperty(PROPERTY_NAME_ZOOM_LEVEL, Integer.toString(this.zoomLevel));
    } else {
      this.zoomLevel = zoomLevel;
      updateMapZoomLevel(Integer.parseInt(zoom));
    }
  }

  private void setShowCompass(String state) {
    this.compassEnabled = Boolean.parseBoolean(state);
    updateMapCompassControl(this.compassEnabled);
  }

  private void setShowUser(String state) {
    this.userLocationEnabled = Boolean.parseBoolean(state);
    updateMapShowUser(this.userLocationEnabled);
  }

  private void setShowZoom(String state) {
    this.zoomControl = Boolean.parseBoolean(state);
    updateMapZoomControl(this.zoomControl);
  }

  private void setShowScale(String state) {
    this.showScale = Boolean.parseBoolean(state);
    updateMapShowScale(this.showScale);
  }

  /**
   * Sets the scale-bar units: "1" = metric, "2" = imperial.
   *
   * @throws IllegalArgumentException for any other value
   */
  private void setScaleUnits(String state) {
    if (state.equals("1")) {
      this.scaleUnits = 1;
    } else if (state.equals("2")) {
      this.scaleUnits = 2;
    } else {
      throw new IllegalArgumentException("Unexpected value for scale: " + state);
    }
    updateScaleUnits(this.scaleUnits);
  }

  // event handlers

  /** Fans a bounds-changed notification out to the registered listeners. */
  protected void onBoundsChanged() {
    // TODO(ewpatton): Send incremental update to companion
    for (MockMapEventListener listener : listeners) {
      listener.onBoundsChanged();
    }
  }

  /**
   * Restores the map viewport to the configured center and zoom, then
   * notifies listeners. Triggered by the toolbar's reset action.
   */
  protected void onResetButtonClicked() {
    try {
      updateMapZoomLevel(zoomLevel);
      updateMapCenter(latitude, longitude);
    } catch(NumberFormatException e) {
      // this shouldn't happen in the normal use of the component
    }
    for (MockMapEventListener listener : listeners) {
      listener.onResetButtonClicked();
    }
  }

  /** Invoked from the JSNI toolbar lock action when the map becomes unlocked. */
  protected void onLockButtonClicked() {
    // we are moving to an unlocked state
    for (MockMapEventListener listener : listeners) {
      listener.onLockButtonClicked();
    }
  }

  /** Invoked from the JSNI toolbar lock action when the map becomes locked. */
  protected void onUnlockButtonClicked() {
    // we are moving to a locked state
    for (MockMapEventListener listener : listeners) {
      listener.onUnlockButtonClicked();
    }
  }

  /**
   * Captures the current viewport as the component's CenterFromString and
   * ZoomLevel properties, then notifies listeners. Triggered by the toolbar's
   * set-initial-bounds (crosshairs) action.
   */
  protected void onSetInitialBoundsClicked() {
    final LatLng centerPoint = getCenter();
    final int zoom = getZoom();
    this.latitude = centerPoint.latitude;
    this.longitude = centerPoint.longitude;
    this.zoomLevel = zoom;
    properties.changePropertyValue("CenterFromString", centerPoint.toString());
    properties.changePropertyValue("ZoomLevel", Integer.toString(zoom));
    for (MockMapEventListener listener : listeners) {
      listener.onSetInitialBoundsClicked();
    }
  }

  // Native Javascript Methods (JSNI)

  /**
   * Initialize the controls for the AppInventor map toolbar.
   * These controls allow the user to:
   * <ul>
   * <li>change the drag behavior from the default of component reordering to panning the map.
   * <li>update the starting center and zoom level from the map viewport.
   * <li>reset the map viewport to the center and zoom level specified in the properties.
   * </ul>
   * This method will be called with every MockMap created, but will only instantiate a singleton
   * set of items (guarded by the {@code L.AI2Lock === undefined} check below).
   */
  private static native void initToolbarItems()/*-{
    var MESSAGES = @com.google.appinventor.client.Ode::MESSAGES;
    var L = $wnd.top.L;
    if (L.AI2Lock === undefined) {
      L.AI2Lock = L.ToolbarAction.extend({
        options: {
          toolbarIcon: {
            tooltip: MESSAGES.@com.google.appinventor.client.OdeMessages::mapLockMovementTooltip()()
          }
        },
        _createIcon: function(toolbar, container, args) {
          L.ToolbarAction.prototype._createIcon.call(this, toolbar, container, args);
          var lockIcon = L.DomUtil.create('i'),
            unlockIcon = L.DomUtil.create('i');
          lockIcon.setAttribute('class', 'fa fa-lock');
          lockIcon.setAttribute('aria-hidden', 'true');
          unlockIcon.setAttribute('class', 'fa fa-unlock');
          unlockIcon.setAttribute('aria-hidden', 'true');
          this.locked = false;
          L.DomUtil.addClass(this._link, 'unlocked');
          this._link.appendChild(lockIcon);
          this._link.appendChild(unlockIcon);
          var self = this;
          L.DomEvent.on(this._link, 'mousedown', function(e) {
            e.stopPropagation();
          });
          L.DomEvent.on(this._link, 'click', function(e) {
            self.locked = !self.locked;
            var map = self.toolbar._control._map;
            map.unlocked = !self.locked;
            var interactions = [map.dragging, map.touchZoom, map.doubleClickZoom, map.scrollWheelZoom, map.boxZoom, map.keyboard, map.tap];
            if (self.locked) {
              for (var i in interactions) interactions[i] && interactions[i].disable();
              L.DomUtil.addClass(self._link, 'locked');
              L.DomUtil.removeClass(self._link, 'unlocked');
              self._link.setAttribute('title', MESSAGES.@com.google.appinventor.client.OdeMessages::mapUnlockMovementTooltip()());
              map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::onUnlockButtonClicked()();
            } else {
              for (var i in interactions) interactions[i] && interactions[i].enable();
              L.DomUtil.addClass(self._link, 'unlocked');
              L.DomUtil.removeClass(self._link, 'locked');
              self._link.setAttribute('title', MESSAGES.@com.google.appinventor.client.OdeMessages::mapLockMovementTooltip()());
              map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::onLockButtonClicked()();
            }
          });
        }
      });
      L.AI2Center = L.ToolbarAction.extend({
        options: {
          toolbarIcon: {
            tooltip: MESSAGES.@com.google.appinventor.client.OdeMessages::mapSetInitialMapTooltip()()
          }
        },
        _createIcon: function(toolbar, container, args) {
          var icon = L.DomUtil.create('i');
          L.ToolbarAction.prototype._createIcon.call(this, toolbar, container, args);
          icon.setAttribute('class', 'fa fa-crosshairs');
          this._link.appendChild(icon);
          var self = this;
          L.DomEvent.on(this._link, 'click', function() {
            var javaMockMap = self.toolbar._control._map.owner;
            javaMockMap.@com.google.appinventor.client.editor.simple.components.MockMap::onSetInitialBoundsClicked()();
          });
        }
      });
      L.AI2Reset = L.ToolbarAction.extend({
        options: {
          toolbarIcon: {
            tooltip: MESSAGES.@com.google.appinventor.client.OdeMessages::mapResetBoundingBoxTooltip()()
          }
        },
        _createIcon: function(toolbar, container, args) {
          var icon = L.DomUtil.create('i');
          L.ToolbarAction.prototype._createIcon.call(this, toolbar, container, args);
          icon.setAttribute('class', 'fa fa-history');
          this._link.appendChild(icon);
          var self = this;
          L.DomEvent.on(this._link, 'click', $entry(function() {
            var javaMockMap = self.toolbar._control._map.owner;
            javaMockMap.@com.google.appinventor.client.editor.simple.components.MockMap::onResetButtonClicked()();
          }));
        }
      });
      L.Control.Compass = L.Control.extend({
        options: { position: 'topright' },
        onAdd: function () {
          var container = L.DomUtil.create('div', 'compass-control'),
            img = L.DomUtil.create('img');
          img.setAttribute('src', '/leaflet/assets/compass.svg');
          container.appendChild(img);
          return container;
        }
      });
      L.control.compass = function(options) {
        return new L.Control.Compass(options);
      };
      L.UserOverlay = L.Layer.extend({
        onAdd: function(map) {
          this._map = map;
          this._el = L.DomUtil.create('div', 'ai2-user-mock-location leaflet-zoom-hide');
          var img = L.DomUtil.create('img');
          this._el.appendChild(img);
          img.setAttribute('src', '/leaflet/assets/location.png');
          map.getPanes()['overlayPane'].appendChild(this._el);
          map.on('viewreset', this._reposition, this);
          this._reposition();
          return this._el;
        },
        onRemove: function(map) {
          map.getPanes().overlayPane.removeChild(this._el);
          map.off('resize', this._reposition);
        },
        _reposition: function(e) {
          var pos = this._map.latLngToLayerPoint(this._map.getCenter());
          L.DomUtil.setPosition(this._el, pos);
        }
      });
    }
  }-*/;

  /**
   * @return the current map center as a {@link LatLng}, or null if the map
   *     has not been initialized yet
   */
  public native LatLng getCenter()/*-{
    var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
    if (map) {
      var center = map.getCenter();
      return @com.google.appinventor.client.editor.simple.components.MockMap.LatLng::new(DD)(center.lat, center.lng);
    }
    return null;
  }-*/;

  /**
   * @return the current zoom level, or 0 if the map has not been initialized
   */
  public native int getZoom()/*-{
    var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
    return map ? map.getZoom() : 0;
  }-*/;

  /**
   * Creates the Leaflet map inside {@link #mapWidget}, wiring up the tile
   * layers, zoom/scale controls, toolbar, compass and user-location overlays.
   * Idempotent: if the map already exists it is only size-invalidated.
   */
  private native void initPanel()/*-{
    var L = $wnd.top.L;
    var tileLayers = [
      null, // because AppInventor is 1-indexed, we leave element 0 as null
      L.tileLayer('http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
        {minZoom: 0, maxZoom: 18,
          attribution: 'Map data © <a href="http://openstreetmap.org">OpenStreetMap</a> contributors'}),
      L.tileLayer('http://basemap.nationalmap.gov/arcgis/rest/services/USGSImageryTopo/MapServer/tile/{z}/{y}/{x}',
        {minZoom: 0, maxZoom: 15,
          attribution: 'Satellite imagery © <a href="http://mapquest.com">USGS</a>'}),
      L.tileLayer('http://basemap.nationalmap.gov/ArcGIS/rest/services/USGSTopo/MapServer/tile/{z}/{y}/{x}',
        {minZoom: 0, maxZoom: 15,
          attribution: 'Map data © <a href="http://www.usgs.gov">USGS</a>'})
    ];
    this.@com.google.appinventor.client.editor.simple.components.MockMap::tileLayers = tileLayers;
    this.@com.google.appinventor.client.editor.simple.components.MockMap::baseLayer =
      tileLayers[this.@com.google.appinventor.client.editor.simple.components.MockMap::selectedTileLayer];
    var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
    if (map) {
      // map exists but may be invalid due to change in the dom, so invalidate and redraw
      map.invalidateSize(false);
    } else {
      var panel = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapWidget;
      var elem = panel.@com.google.gwt.user.client.ui.UIObject::getElement()();
      if (elem.firstElementChild != null) elem = elem.firstElementChild;
      var latitude = this.@com.google.appinventor.client.editor.simple.components.MockMap::latitude,
        longitude = this.@com.google.appinventor.client.editor.simple.components.MockMap::longitude,
        zoomControl = this.@com.google.appinventor.client.editor.simple.components.MockMap::zoomControl,
        zoom = this.@com.google.appinventor.client.editor.simple.components.MockMap::zoomLevel,
        showScale = this.@com.google.appinventor.client.editor.simple.components.MockMap::showScale,
        scaleUnits = this.@com.google.appinventor.client.editor.simple.components.MockMap::scaleUnits;
      map = L.map(elem, {zoomControl: false, editable: true}).setView([latitude, longitude], zoom);
      var messages = @com.google.appinventor.client.Ode::getMessages()();
      map.zoomControl = L.control.zoom({
        position: 'topleft',
        zoomInTitle: messages.@com.google.appinventor.client.OdeMessages::mapZoomIn()(),
        zoomOutTitle: messages.@com.google.appinventor.client.OdeMessages::mapZoomOut()()
      });
      if (zoomControl) {
        map.zoomControl.addTo(map);
      }
      var scaleOptions = {metric: true, imperial: false, position: 'bottomright'};
      if (scaleUnits == 2) {
        scaleOptions.metric = false;
        scaleOptions.imperial = true;
      }
      map.scaleControl = L.control.scale(scaleOptions);
      if (showScale) {
        map.scaleControl.addTo(map);
      }
      map.owner = this;
      map.unlocked = true;
      map.aiControls = new L.Toolbar.Control({position: 'bottomleft',
        actions: [ L.AI2Lock, L.AI2Center, L.AI2Reset ]});
      map.aiControls.addTo(map);
      map.compassLayer = L.control.compass();
      map.userLayer = new L.UserOverlay();
      map.on('mouseup click', function(e) {
        e = e.originalEvent;
        if (e.eventPhase !== 3) return;
        var el = e.target,
          overlay = this.getPanes()['overlayPane'],
          markers = this.getPanes()['markerPane'],
          background = this.getPanes()['tilePane'],
          container = this.getContainer();
        while (el && el.parentNode !== container) {
          if (el === overlay || el === markers) {
            // Overlays handle their own click events, but sometimes it propagates to the map and eventually GWT.
            // This is not desirable because it causes issues with the selected component.
            return;
          } else if (el === background) {
            this.owner.@com.google.appinventor.client.editor.simple.components.MockComponent::select()();
            return;
          }
          el = el.parentNode;
        }
      });
      this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance = map;
      setTimeout(function() {
        map.addLayer(map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::baseLayer);
        map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::updateMapZoomControl(*)(
          map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::zoomControl);
        map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::updateMapCompassControl(*)(
          map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::compassEnabled);
        map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::updateMapShowUser(*)(
          map.owner.@com.google.appinventor.client.editor.simple.components.MockMap::userLocationEnabled);
      });
    }
  }-*/;

  /**
   * Schedules a Leaflet size invalidation so the map redraws after a
   * layout/size change. Safe to call before the map exists.
   */
  native void invalidateMap()/*-{
    var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
    if (map) { // Map may not be initialized yet, e.g., during project load.
      setTimeout(function() {
        map.invalidateSize(false);
      }, 0);
    }
  }-*/;

  /**
   * Pans the map to the given latitude, keeping the current longitude field.
   * NOTE(review): no null-guard on the map instance here, unlike
   * updateMapCenter -- confirm callers only run after initPanel().
   */
  private native void updateMapLatitude(double latitude)/*-{
    var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
    var longitude = this.@com.google.appinventor.client.editor.simple.components.MockMap::longitude;
    map.panTo($wnd.top.L.latLng(latitude, longitude));
  }-*/;

  /**
   * Pans the map to the given longitude, keeping the current latitude field.
   * NOTE(review): no null-guard on the map instance; see updateMapLatitude.
   */
  private native void updateMapLongitude(double longitude)/*-{
    var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
    var latitude = this.@com.google.appinventor.client.editor.simple.components.MockMap::latitude;
    map.panTo($wnd.top.L.latLng(latitude, longitude));
  }-*/;

  /** Pans the map to the given center, if the map has been initialized. */
  private native void updateMapCenter(double latitude, double longitude)/*-{
    var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
    if (map) { // Map may not be initialized yet, e.g., during project load.
      map.panTo([latitude, longitude], {animate: true});
    }
  }-*/;

  /**
   * Swaps the base tile layer for the 1-indexed layer {@code type}; indexes
   * outside (0, tileLayers.length) are silently ignored.
   */
  private native void updateMapType(int type)/*-{
    var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
    var tileLayers = this.@com.google.appinventor.client.editor.simple.components.MockMap::tileLayers;
    var baseLayer = this.@com.google.appinventor.client.editor.simple.components.MockMap::baseLayer;
    if (map && baseLayer && tileLayers) {
      if (0 < type && type < tileLayers.length) {
        map.removeLayer(baseLayer);
        baseLayer = tileLayers[type];
        map.addLayer(baseLayer);
        baseLayer.bringToBack();
        this.@com.google.appinventor.client.editor.simple.components.MockMap::baseLayer = baseLayer;
      }
    }
  }-*/;

  /**
   * Converts container pixel coordinates to a {@link LatLng}.
   * Returns null (JS undefined) when the map is not yet initialized.
   */
  native LatLng projectFromXY(int x, int y)/*-{
    var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
    if (map) {
      var result = map.containerPointToLatLng([x, y]);
      return @com.google.appinventor.client.editor.simple.components.MockMap.LatLng::new(DD)(result.lat, result.lng);
    }
  }-*/;

  /** Applies the given zoom level, if the map has been initialized. */
  private native void updateMapZoomLevel(int zoomLevel)/*-{
    var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
    if (map) {
      map.setZoom(zoomLevel);
    }
  }-*/;

  /** Shows or hides the compass control. */
  private native void updateMapCompassControl(boolean enable)/*-{
    var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
    if (map) {
      if (enable) {
        map.addControl(map.compassLayer);
      } else {
        map.removeControl(map.compassLayer);
      }
    }
  }-*/;

  /** Shows or hides the mock user-location overlay. */
  private native void updateMapShowUser(boolean enable)/*-{
    var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
    if (map) {
      if (enable) {
        map.addLayer(map.userLayer);
      } else {
        map.removeLayer(map.userLayer);
      }
    }
  }-*/;

  /** Shows or hides the zoom control, creating it lazily if needed. */
  private native void updateMapZoomControl(boolean enable)/*-{
    var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
    if (map) {
      if (!map.zoomControl) {
        map.zoomControl = $wnd.top.L.control.zoom();
      }
      if (enable) {
        map.zoomControl.addTo(map);
      } else {
        map.removeControl(map.zoomControl);
      }
    }
  }-*/;

  /** Shows or hides the scale control, creating it lazily if needed. */
  private native void updateMapShowScale(boolean enable)/*-{
    var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
    if (map) {
      if (!map.scaleControl) {
        map.scaleControl = $wnd.top.L.control.scale({position: 'topleft'});
      }
      if (enable) {
        map.scaleControl.addTo(map);
      }  else {
        map.removeControl(map.scaleControl);
      }
    }
  }-*/;

  /**
   * Rebuilds the scale control for the given units (1 = metric, 2 = imperial),
   * re-adding it only if the scale is currently visible.
   */
  private native void updateScaleUnits(int units)/*-{
    var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance,
      scaleVisible = this.@com.google.appinventor.client.editor.simple.components.MockMap::showScale;
    if (map) {
      if (scaleVisible) {
        map.removeControl(map.scaleControl);
      }
      map.scaleControl = $wnd.top.L.control.scale({
        metric: units == 1,
        imperial: units == 2,
        position: 'bottomright'
      });
      if (scaleVisible) {
        map.scaleControl.addTo(map);
      }
    }
  }-*/;

  /**
   * @return true if map movement is currently unlocked (the unlocked flag is
   *     maintained by the JSNI lock toolbar action); false before the map
   *     is initialized
   */
  private native boolean isUnlocked()/*-{
    var map = this.@com.google.appinventor.client.editor.simple.components.MockMap::mapInstance;
    if (map) {
      return map.unlocked;
    } else {
      return false;
    }
  }-*/;

  /** Simple Java-side latitude/longitude pair used to cross the JSNI boundary. */
  public static class LatLng {
    public double latitude;
    public double longitude;

    public LatLng(double latitude, double longitude) {
      this.latitude = latitude;
      this.longitude = longitude;
    }

    /**
     * Formats as "lat, lng" -- this exact format is stored in the
     * CenterFromString property (see onSetInitialBoundsClicked).
     */
    @Override
    public String toString() {
      return Double.toString(latitude) + ", " + Double.toString(longitude);
    }

    /** @return a JavaScript {lat, lng} object mirroring this pair */
    public native NativeLatLng toNative()/*-{
      return {
        lat: this.@com.google.appinventor.client.editor.simple.components.MockMap.LatLng::latitude,
        lng: this.@com.google.appinventor.client.editor.simple.components.MockMap.LatLng::longitude
      };
    }-*/;
  }

  /** JavaScript overlay type for a native {lat, lng} object. */
  public static class NativeLatLng extends JavaScriptObject {
    protected NativeLatLng() {}

    public final native double getLatitude()/*-{
      return this.lat;
    }-*/;

    public final native double getLongitude()/*-{
      return this.lng;
    }-*/;
  }

  /** Listener interface for designer-side map events raised by this mock. */
  public interface MockMapEventListener {
    void onBoundsChanged();
    void onResetButtonClicked();
    void onLockButtonClicked();
    void onUnlockButtonClicked();
    void onSetInitialBoundsClicked();
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity;
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Stable;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.QueueState;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEventType;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ActiveUsersManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerNode;
/**
 * <code>CSQueue</code> represents a node in the tree of
 * hierarchical queues in the {@link CapacityScheduler}.
 */
@Stable
@Private
public interface CSQueue
extends org.apache.hadoop.yarn.server.resourcemanager.scheduler.Queue {
  /**
   * Get the parent <code>Queue</code>.
   * @return the parent queue
   */
  public CSQueue getParent();

  /**
   * Set the parent <code>Queue</code>.
   * @param newParentQueue new parent queue
   */
  public void setParent(CSQueue newParentQueue);

  /**
   * Get the queue name.
   * @return the queue name
   */
  public String getQueueName();

  /**
   * Get the full name of the queue, including the hierarchy.
   * @return the full name of the queue
   */
  public String getQueuePath();

  /**
   * Get the configured <em>capacity</em> of the queue.
   * @return queue capacity
   */
  public float getCapacity();

  /**
   * Get capacity of the parent of the queue as a function of the
   * cumulative capacity in the cluster.
   * @return capacity of the parent of the queue as a function of the
   *         cumulative capacity in the cluster
   */
  public float getAbsoluteCapacity();

  /**
   * Get the configured maximum-capacity of the queue.
   * @return the configured maximum-capacity of the queue
   */
  public float getMaximumCapacity();

  /**
   * Get maximum-capacity of the queue as a function of the cumulative capacity
   * of the cluster.
   * @return maximum-capacity of the queue as a function of the cumulative capacity
   *         of the cluster
   */
  public float getAbsoluteMaximumCapacity();

  /**
   * Get the current absolute used capacity of the queue
   * relative to the entire cluster.
   * @return queue absolute used capacity
   */
  public float getAbsoluteUsedCapacity();

  /**
   * Get the current used capacity of the queue
   * and its children (if any).
   * @return queue used capacity
   */
  public float getUsedCapacity();

  /**
   * Set used capacity of the queue.
   * @param usedCapacity used capacity of the queue
   */
  public void setUsedCapacity(float usedCapacity);

  /**
   * Set absolute used capacity of the queue.
   * @param absUsedCapacity absolute used capacity of the queue
   */
  public void setAbsoluteUsedCapacity(float absUsedCapacity);

  /**
   * Get the currently utilized resources in the cluster
   * by the queue and children (if any).
   * @return used resources by the queue and its children
   */
  public Resource getUsedResources();

  /**
   * Get the current run-state of the queue.
   * @return current run-state
   */
  public QueueState getState();

  /**
   * Get child queues.
   * @return child queues
   */
  public List<CSQueue> getChildQueues();

  /**
   * Check if the <code>user</code> has permission to perform the operation.
   * @param acl ACL
   * @param user user
   * @return <code>true</code> if the user has the permission,
   *         <code>false</code> otherwise
   */
  public boolean hasAccess(QueueACL acl, UserGroupInformation user);

  /**
   * Submit a new application to the queue.
   * @param application application being submitted
   * @param user user who submitted the application
   * @param queue queue to which the application is submitted
   * @throws AccessControlException if the user may not submit to the queue
   */
  public void submitApplication(FiCaSchedulerApp application, String user,
      String queue)
  throws AccessControlException;

  /**
   * An application submitted to this queue has finished.
   * @param application the application that finished
   * @param queue application queue
   */
  public void finishApplication(FiCaSchedulerApp application, String queue);

  /**
   * Assign containers to applications in the queue or its children (if any).
   * @param clusterResource the resource of the cluster.
   * @param node node on which resources are available
   * @return the assignment
   */
  public CSAssignment assignContainers(
      Resource clusterResource, FiCaSchedulerNode node);

  /**
   * A container assigned to the queue has completed.
   * @param clusterResource the resource of the cluster
   * @param application application to which the container was assigned
   * @param node node on which the container completed
   * @param container completed container,
   *                  <code>null</code> if it was just a reservation
   * @param containerStatus <code>ContainerStatus</code> for the completed
   *                        container
   * @param childQueue <code>CSQueue</code> to reinsert in childQueues
   * @param event event to be sent to the container
   */
  public void completedContainer(Resource clusterResource,
      FiCaSchedulerApp application, FiCaSchedulerNode node,
      RMContainer container, ContainerStatus containerStatus,
      RMContainerEventType event, CSQueue childQueue);

  /**
   * Get the number of applications in the queue.
   * @return number of applications
   */
  public int getNumApplications();

  /**
   * Reinitialize the queue.
   * @param newlyParsedQueue new queue to re-initialize from
   * @param clusterResource resources in the cluster
   * @throws IOException if the queue cannot be reinitialized
   */
  public void reinitialize(CSQueue newlyParsedQueue, Resource clusterResource)
  throws IOException;

  /**
   * Update the cluster resource for queues as we add/remove nodes.
   * @param clusterResource the current cluster resource
   */
  public void updateClusterResource(Resource clusterResource);

  /**
   * Get the {@link ActiveUsersManager} for the queue.
   * @return the <code>ActiveUsersManager</code> for the queue
   */
  public ActiveUsersManager getActiveUsersManager();

  /**
   * Recover the state of the queue.
   * @param clusterResource the resource of the cluster
   * @param application the application for which the container was allocated
   * @param container the container that was recovered.
   */
  public void recoverContainer(Resource clusterResource, FiCaSchedulerApp application,
      Container container);
}
| |
/*
* Copyright 2013 Babak Farhang
*/
package com.gnahraf.io.store.table;
import static org.junit.Assert.*;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Random;
import org.junit.Test;
import com.gnahraf.io.store.table.order.RowOrder;
import com.gnahraf.io.store.table.order.RowOrders;
/**
*
* @author Babak
*/
public class TableSorterTest extends TableTestHarness {
protected final static int ROW_WIDTH = 4;
protected final static RowOrder ORDER = RowOrders.INT_ORDER;
protected long beginSortNanos;
protected long endSortNanos;
@Test
public void simplest() throws IOException {
  // The anonymous Object pins this call site; initUnitTestDir presumably
  // derives a per-test directory from its class via reflection -- keep as-is.
  initUnitTestDir(new Object() { });
  // Smallest non-trivial input: a single out-of-order pair of rows.
  int[] values = {
      89, 23
  };
  testImpl(values);
}
@Test
public void testSome() throws IOException {
  // Anonymous Object pins this call site for the harness (see simplest()).
  initUnitTestDir(new Object() { });
  // A small handful of unsorted rows.
  int[] values = {
      89, 23, 11, 12, 36, 9, 17, 54
  };
  testImpl(values);
}
// 4 byte rows: size sweep from 1 KiB to 64 KiB of random rows at the default
// ROW_WIDTH. Each test passes an anonymous Object so the harness can derive
// its working directory (presumably via reflection) -- do not refactor away.
@Test
public void test1K() throws IOException {
  initUnitTestDir(new Object() { });
  bigTestImpl(1);
}

@Test
public void test2K() throws IOException {
  initUnitTestDir(new Object() { });
  bigTestImpl(2);
}

@Test
public void test4K() throws IOException {
  initUnitTestDir(new Object() { });
  bigTestImpl(4);
}

@Test
public void test8K() throws IOException {
  initUnitTestDir(new Object() { });
  bigTestImpl(8);
}

@Test
public void test16K() throws IOException {
  initUnitTestDir(new Object() { });
  bigTestImpl(16);
}

@Test
public void test32K() throws IOException {
  initUnitTestDir(new Object() { });
  bigTestImpl(32);
}

@Test
public void test64K() throws IOException {
  initUnitTestDir(new Object() { });
  bigTestImpl(64);
}
// 8 byte rows: same size sweep as above, but with an explicit 8-byte row
// width. The anonymous Object again pins the call site for the harness.
@Test
public void test1K8() throws IOException {
  initUnitTestDir(new Object() { });
  bigTestImpl(1, 8);
}

@Test
public void test2K8() throws IOException {
  initUnitTestDir(new Object() { });
  bigTestImpl(2, 8);
}

@Test
public void test4K8() throws IOException {
  initUnitTestDir(new Object() { });
  bigTestImpl(4, 8);
}

@Test
public void test8K8() throws IOException {
  initUnitTestDir(new Object() { });
  bigTestImpl(8, 8);
}

@Test
public void test16K8() throws IOException {
  initUnitTestDir(new Object() { });
  bigTestImpl(16, 8);
}

@Test
public void test32K8() throws IOException {
  initUnitTestDir(new Object() { });
  bigTestImpl(32, 8);
}

@Test
public void test64K8() throws IOException {
  initUnitTestDir(new Object() { });
  bigTestImpl(64, 8);
}
// 64 byte rows
@Test
public void testCount128Width64() throws IOException {
initUnitTestDir(new Object() { });
bigTest(128, 64);
}
@Test
public void testCount256Width64() throws IOException {
initUnitTestDir(new Object() { });
bigTest(256, 64);
}
@Test
public void testCount512Width64() throws IOException {
initUnitTestDir(new Object() { });
bigTest(512, 64);
}
@Test
public void testCount1KWidth64() throws IOException {
initUnitTestDir(new Object() { });
bigTest(1024, 64);
}
@Test
public void testCount2KWidth64() throws IOException {
initUnitTestDir(new Object() { });
bigTest(2048, 64);
}
@Test
public void testCount4KWidth64() throws IOException {
initUnitTestDir(new Object() { });
bigTest(4 * 1024, 64);
}
private void bigTestImpl(int kiloBytes) throws IOException {
bigTestImpl(kiloBytes, ROW_WIDTH);
}
private void bigTest(int count, int rowWidth) throws IOException {
bigTestImpl((count * rowWidth) / 1024, rowWidth);
}
private void bigTestImpl(int kiloBytes, int rowWidth) throws IOException {
int memory = kiloBytes * 1024;
int[] values = new int[memory / rowWidth];
Random random = new Random(memory);
for (int i = 0; i < values.length; ++i)
values[i] = random.nextInt();
testImpl(values, rowWidth);
long lapMillis = (endSortNanos - beginSortNanos) / 1000000;
long rowsSortedPerMilli = (long) (((double) values.length) / (endSortNanos - beginSortNanos) * 1000000);
log.info(
"[" + getMethod() + "] sorted " + kiloBytes + "kB (" + values.length + " rows) in " +
lapMillis + " msec. Sort rate: " + rowsSortedPerMilli + " rows per msec");
}
private void testImpl(int[] values) throws IOException {
testImpl(values, ROW_WIDTH);
}
protected void testImpl(int[] values, int rowWidth) throws IOException {
// initialize the input table with the given rows..
// (test method returns a SortedTable, but it's lying.. its unsorted)
Table unsorted = initIntTable(rowWidth, ORDER, values, 0);
TableSorter sorter = new TableSorter(ByteBuffer.allocate(values.length * rowWidth), ORDER);
File sortedFile = new File(unitTestDir(), "sorted");
beginSortNanos = System.nanoTime();
sorter.sort(unsorted, sortedFile);
endSortNanos = System.nanoTime();
unsorted.close();
Table sorted = Table.newSansKeystoneInstance(
new RandomAccessFile(sortedFile, "rw").getChannel(), rowWidth);
// sort the values so that they become the expected sequence
Arrays.sort(values);
// verify the expected sequence..
ByteBuffer rowBuf = ByteBuffer.allocate(rowWidth);
assertEquals(values.length, sorted.getRowCount());
for (int i = values.length; i-- > 0; ) {
rowBuf.clear();
sorted.read(i, rowBuf);
rowBuf.flip();
assertEquals(values[i], rowBuf.getInt());
}
sorted.close();
}
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.inventory.rest;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
import static javax.ws.rs.core.Response.Status.CONFLICT;
import static javax.ws.rs.core.Response.Status.CREATED;
import static javax.ws.rs.core.Response.Status.FORBIDDEN;
import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
import static javax.ws.rs.core.Response.Status.NOT_FOUND;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.hawkular.inventory.api.Data;
import org.hawkular.inventory.api.EntityAlreadyExistsException;
import org.hawkular.inventory.api.EntityNotFoundException;
import org.hawkular.inventory.api.Environments;
import org.hawkular.inventory.api.Feeds;
import org.hawkular.inventory.api.Inventory;
import org.hawkular.inventory.api.MetricTypes;
import org.hawkular.inventory.api.Metrics;
import org.hawkular.inventory.api.OperationTypes;
import org.hawkular.inventory.api.Relationships;
import org.hawkular.inventory.api.ResolvableToSingle;
import org.hawkular.inventory.api.ResolvableToSingleWithRelationships;
import org.hawkular.inventory.api.ResourceTypes;
import org.hawkular.inventory.api.Resources;
import org.hawkular.inventory.api.Tenants;
import org.hawkular.inventory.api.TransactionFrame;
import org.hawkular.inventory.api.WriteInterface;
import org.hawkular.inventory.api.model.AbstractElement;
import org.hawkular.inventory.api.model.Blueprint;
import org.hawkular.inventory.api.model.CanonicalPath;
import org.hawkular.inventory.api.model.DataEntity;
import org.hawkular.inventory.api.model.ElementBlueprintVisitor;
import org.hawkular.inventory.api.model.ElementTypeVisitor;
import org.hawkular.inventory.api.model.Entity;
import org.hawkular.inventory.api.model.Environment;
import org.hawkular.inventory.api.model.Feed;
import org.hawkular.inventory.api.model.Metric;
import org.hawkular.inventory.api.model.MetricType;
import org.hawkular.inventory.api.model.OperationType;
import org.hawkular.inventory.api.model.Path;
import org.hawkular.inventory.api.model.Relationship;
import org.hawkular.inventory.api.model.Resource;
import org.hawkular.inventory.api.model.ResourceType;
import com.wordnik.swagger.annotations.Api;
import com.wordnik.swagger.annotations.ApiOperation;
import com.wordnik.swagger.annotations.ApiParam;
import com.wordnik.swagger.annotations.ApiResponse;
import com.wordnik.swagger.annotations.ApiResponses;
/**
* @author Lukas Krejci
* @since 0.4.0
*/
@javax.ws.rs.Path("/bulk")
@Produces(APPLICATION_JSON)
@Consumes(APPLICATION_JSON)
@Api(value = "/bulk", description = "Endpoint for bulk operations on inventory entities")
public class RestBulk extends RestBase {

    /**
     * Resolves a client-supplied path string into an absolute {@link CanonicalPath}.
     * A {@code null} or empty string means the tenant root itself; relative paths
     * are applied on top of {@code rootPath}.
     */
    private static CanonicalPath canonicalize(String path, CanonicalPath rootPath) {
        Path p;
        if (path == null || path.isEmpty()) {
            p = rootPath;
        } else {
            p = Path.fromPartiallyUntypedString(path, rootPath, rootPath, Entity.class);
        }
        if (p.isRelative()) {
            p = p.toRelativePath().applyTo(rootPath);
        }
        return p.toCanonicalPath();
    }

    /**
     * Records the HTTP status for one created element in the per-type result map,
     * lazily creating the inner map for a type seen for the first time.
     */
    private static void putStatus(Map<ElementType, Map<CanonicalPath, Integer>> statuses, ElementType et,
                                  CanonicalPath cp, Integer status) {
        Map<CanonicalPath, Integer> typeStatuses = statuses.get(et);
        if (typeStatuses == null) {
            typeStatuses = new HashMap<>();
            statuses.put(et, typeStatuses);
        }
        typeStatuses.put(cp, status);
    }

    /**
     * Renders a relationship blueprint's direction+name as an ASCII arrow, e.g.
     * {@code -(contains)->}. Used to synthesize ids for relationship statuses.
     */
    private static String arrow(Relationship.Blueprint b) {
        switch (b.getDirection()) {
            case both:
                return "<-(" + b.getName() + ")->";
            case outgoing:
                return "-(" + b.getName() + ")->";
            case incoming:
                return "<-(" + b.getName() + ")-";
            default:
                throw new IllegalStateException("Unhandled direction type: " + b.getDirection());
        }
    }

    /**
     * Double dispatch on (parent element type, child element type): returns the
     * write API through which a {@code nextType} entity can be created under a
     * parent of type {@code elementClass} (accessed via {@code single}).
     * Any combination not explicitly handled is rejected with
     * {@link IllegalArgumentException}.
     */
    private static WriteInterface<?, ?, ?, ?> step(Class<?> elementClass, Class<?> nextType,
                                                   ResolvableToSingle<?, ?> single) {
        return ElementTypeVisitor.accept(elementClass,
                new ElementTypeVisitor.Simple<WriteInterface<?, ?, ?, ?>, Void>() {

                    @Override
                    protected WriteInterface<?, ?, ?, ?> defaultAction() {
                        throw new IllegalArgumentException("Entity of type '" + nextType.getSimpleName() + "' cannot " +
                                "be created under an entity of type '" + elementClass.getSimpleName() + "'.");
                    }

                    // Environments can contain feeds and "feedless" metrics/resources.
                    @Override
                    public WriteInterface<?, ?, ?, ?> visitEnvironment(Void parameter) {
                        return ElementTypeVisitor.accept(nextType, new RejectingVisitor() {
                            @Override
                            public WriteInterface<?, ?, ?, ?> visitFeed(Void parameter) {
                                return ((Environments.Single) single).feeds();
                            }

                            @Override
                            public WriteInterface<?, ?, ?, ?> visitMetric(Void parameter) {
                                return ((Environments.Single) single).feedlessMetrics();
                            }

                            @Override
                            public WriteInterface<?, ?, ?, ?> visitResource(Void parameter) {
                                return ((Environments.Single) single).feedlessResources();
                            }
                        }, null);
                    }

                    // Feeds can contain metrics, metric types, resources and resource types.
                    @Override
                    public WriteInterface<?, ?, ?, ?> visitFeed(Void parameter) {
                        return ElementTypeVisitor.accept(nextType, new RejectingVisitor() {
                            @Override
                            public WriteInterface<?, ?, ?, ?> visitMetric(Void parameter) {
                                return ((Feeds.Single) single).metrics();
                            }

                            @Override
                            public WriteInterface<?, ?, ?, ?> visitMetricType(Void parameter) {
                                return ((Feeds.Single) single).metricTypes();
                            }

                            @Override
                            public WriteInterface<?, ?, ?, ?> visitResource(Void parameter) {
                                return ((Feeds.Single) single).resources();
                            }

                            @Override
                            public WriteInterface<?, ?, ?, ?> visitResourceType(Void parameter) {
                                return ((Feeds.Single) single).resourceTypes();
                            }
                        }, null);
                    }

                    // Metrics and metric types cannot contain anything.
                    @Override
                    public WriteInterface<?, ?, ?, ?> visitMetric(Void parameter) {
                        return defaultAction();
                    }

                    @Override
                    public WriteInterface<?, ?, ?, ?> visitMetricType(Void parameter) {
                        return defaultAction();
                    }

                    // Operation types can only contain data entities.
                    @Override
                    public WriteInterface<?, ?, ?, ?> visitOperationType(Void parameter) {
                        return ElementTypeVisitor.accept(nextType, new RejectingVisitor() {
                            @Override
                            public WriteInterface<?, ?, ?, ?> visitData(Void parameter) {
                                return ((OperationTypes.Single) single).data();
                            }
                        }, null);
                    }

                    // Resources can contain data entities and (contained) child resources.
                    @Override
                    public WriteInterface<?, ?, ?, ?> visitResource(Void parameter) {
                        return ElementTypeVisitor.accept(nextType, new RejectingVisitor() {
                            @Override
                            public WriteInterface<?, ?, ?, ?> visitData(Void parameter) {
                                return ((Resources.Single) single).data();
                            }

                            @Override
                            public WriteInterface<?, ?, ?, ?> visitResource(Void parameter) {
                                return ((Resources.Single) single).containedChildren();
                            }
                        }, null);
                    }

                    // Resource types can contain data entities and operation types.
                    @Override
                    public WriteInterface<?, ?, ?, ?> visitResourceType(Void parameter) {
                        return ElementTypeVisitor.accept(nextType, new RejectingVisitor() {
                            @Override
                            public WriteInterface<?, ?, ?, ?> visitData(Void parameter) {
                                return ((ResourceTypes.Single) single).data();
                            }

                            @Override
                            public WriteInterface<?, ?, ?, ?> visitOperationType(Void parameter) {
                                return ((ResourceTypes.Single) single).operationTypes();
                            }
                        }, null);
                    }

                    // Tenants can contain environments and "feedless" metric/resource types.
                    @Override
                    public WriteInterface<?, ?, ?, ?> visitTenant(Void parameter) {
                        return ElementTypeVisitor.accept(nextType, new RejectingVisitor() {
                            @Override
                            public WriteInterface<?, ?, ?, ?> visitEnvironment(Void parameter) {
                                return ((Tenants.Single) single).environments();
                            }

                            @Override
                            public WriteInterface<?, ?, ?, ?> visitMetricType(Void parameter) {
                                return ((Tenants.Single) single).feedlessMetricTypes();
                            }

                            @Override
                            public WriteInterface<?, ?, ?, ?> visitResourceType(Void parameter) {
                                return ((Tenants.Single) single).feedlessResourceTypes();
                            }
                        }, null);
                    }

                    /**
                     * Inner-visitor base whose default action reports the rejected
                     * (parent, child) type combination. Local class so it can capture
                     * {@code elementClass}/{@code nextType} from the enclosing method.
                     */
                    class RejectingVisitor extends ElementTypeVisitor.Simple<WriteInterface<?, ?, ?, ?>, Void> {
                        @Override
                        protected WriteInterface<?, ?, ?, ?> defaultAction() {
                            throw new IllegalArgumentException(
                                    "Entity of type '" + nextType.getSimpleName() + "' cannot " +
                                            "be created under an entity of type '" + elementClass.getSimpleName() +
                                            "'.");
                        }
                    }
                }, null);
    }

    /**
     * Dispatches on the blueprint's concrete type and creates the corresponding
     * entity through the (already type-appropriate) write interface {@code wrt}.
     * The casts are safe by construction: {@link #step} returned the matching
     * ReadWrite interface for this blueprint type.
     */
    private static ResolvableToSingle<?, ?> create(Blueprint b, WriteInterface<?, ?, ?, ?> wrt) {
        return b.accept(new ElementBlueprintVisitor.Simple<ResolvableToSingle<?, ?>, Void>() {

            @SuppressWarnings("unchecked")
            @Override
            public ResolvableToSingle<?, ?> visitData(DataEntity.Blueprint<?> data, Void parameter) {
                return ((Data.ReadWrite) wrt).create(data);
            }

            @Override
            public ResolvableToSingle<?, ?> visitEnvironment(Environment.Blueprint environment, Void parameter) {
                return ((Environments.ReadWrite) wrt).create(environment);
            }

            @Override
            public ResolvableToSingle<?, ?> visitFeed(Feed.Blueprint feed, Void parameter) {
                return ((Feeds.ReadWrite) wrt).create(feed);
            }

            @Override public ResolvableToSingle<?, ?> visitMetric(Metric.Blueprint metric, Void parameter) {
                return ((Metrics.ReadWrite) wrt).create(metric);
            }

            @Override public ResolvableToSingle<?, ?> visitMetricType(MetricType.Blueprint metricType, Void parameter) {
                return ((MetricTypes.ReadWrite) wrt).create(metricType);
            }

            @Override
            public ResolvableToSingle<?, ?> visitOperationType(OperationType.Blueprint operationType, Void parameter) {
                return ((OperationTypes.ReadWrite) wrt).create(operationType);
            }

            @Override
            public ResolvableToSingle<?, ?> visitResource(Resource.Blueprint resource, Void parameter) {
                return ((Resources.ReadWrite) wrt).create(resource);
            }

            @Override
            public ResolvableToSingle<?, ?> visitResourceType(ResourceType.Blueprint type, Void parameter) {
                return ((ResourceTypes.ReadWrite) wrt).create(type);
            }
        }, null);
    }

    /**
     * REST entry point for bulk creation. Note: the response status is always
     * 201 CREATED; per-entity outcomes (including failures) are reported in the
     * returned {type -> {canonical path -> HTTP status}} map.
     */
    @POST
    @javax.ws.rs.Path("/")
    @ApiOperation("Bulk creation of new entities. The response body contains details about results of creation" +
            " of individual entities. The return value is a map where keys are types of entities created and values" +
            " are again maps where keys are the canonical paths of the entities to be created and values are HTTP" +
            " status codes - 201 OK, 400 if invalid path is supplied, 409 if the entity already exists on given path" +
            " or 500 in case of internal error.")
    @ApiResponses({
            @ApiResponse(code = 201, message = "Entities successfully created"),
    })
    public Response addEntities(@ApiParam("This is a map where keys are paths to the parents under which entities " +
            "should be created. The values are again maps where keys are one of [environment, resourceType, " +
            "metricType, operationType, feed, resource, metric, dataEntity, relationship] and values are arrays of " +
            "blueprints of entities of the corresponding types.") Map<String, Map<ElementType, List<Object>>> entities,
                                @Context UriInfo uriInfo) {

        CanonicalPath rootPath = CanonicalPath.of().tenant(getTenantId()).get();

        Map<ElementType, Map<CanonicalPath, Integer>> statuses = bulkCreate(entities, rootPath);

        return Response.status(CREATED).entity(statuses).build();
    }

    /**
     * Creates all requested entities inside a single inventory transaction.
     * Relationships are handled separately from plain entities. Any throwable
     * rolls the whole transaction back and is rethrown to the caller.
     */
    private Map<ElementType, Map<CanonicalPath, Integer>> bulkCreate(
            Map<String, Map<ElementType, List<Object>>> entities, CanonicalPath rootPath) {

        Map<ElementType, Map<CanonicalPath, Integer>> statuses = new HashMap<>();

        TransactionFrame transaction = inventory.newTransactionFrame();
        Inventory binv = transaction.boundInventory();

        IdExtractor idExtractor = new IdExtractor();

        try {
            for (Map.Entry<String, Map<ElementType, List<Object>>> e : entities.entrySet()) {
                Map<ElementType, List<Object>> allBlueprints = e.getValue();

                CanonicalPath parentPath = canonicalize(e.getKey(), rootPath);

                ResolvableToSingle<?, ?> single = binv.inspect(parentPath, ResolvableToSingle.class);

                for (Map.Entry<ElementType, List<Object>> ee : allBlueprints.entrySet()) {
                    ElementType elementType = ee.getKey();
                    List<Object> rawBlueprints = ee.getValue();

                    List<Blueprint> blueprints = deserializeBlueprints(elementType, rawBlueprints);

                    if (elementType == ElementType.relationship) {
                        bulkCreateRelationships(statuses, parentPath,
                                (ResolvableToSingleWithRelationships<?, ?>) single, elementType, blueprints);
                    } else {
                        bulkCreateEntity(statuses, idExtractor, parentPath, single, elementType, blueprints);
                    }
                }
            }
            transaction.commit();
            return statuses;
        } catch (Throwable t) {
            // TODO this potentially leaves behind the security resources of the entities that have been created
            // before the transaction failure
            transaction.rollback();
            throw t;
        }
    }

    /**
     * Converts the raw (already JSON-parsed) objects into typed blueprints for
     * the given element type by round-tripping each one through the Jackson
     * mapper; a conversion failure is reported as IllegalArgumentException.
     */
    private List<Blueprint> deserializeBlueprints(ElementType elementType, List<Object> rawBlueprints) {
        return rawBlueprints.stream().map((o) -> {
            try {
                String js = mapper.writeValueAsString(o);
                return (Blueprint) mapper.reader(elementType.blueprintType).readValue(js);
            } catch (IOException e1) {
                throw new IllegalArgumentException("Failed to deserialize as " + elementType
                        .blueprintType + " the following data: " + o, e1);
            }
        }).collect(Collectors.toList());
    }

    /**
     * Creates non-relationship entities under {@code parentPath}, recording a
     * per-entity status: 400 when the parent path cannot contain this type,
     * 403 when the caller lacks create permission, 201 on success, 409 when the
     * entity already exists, and 500 on any other failure.
     */
    private void bulkCreateEntity(Map<ElementType, Map<CanonicalPath, Integer>> statuses,
                                  IdExtractor idExtractor, CanonicalPath parentPath,
                                  ResolvableToSingle<?, ?> single, ElementType elementType,
                                  List<Blueprint> blueprints) {

        if (!parentPath.modified().canExtendTo(elementType.elementType)) {
            putStatus(statuses, elementType, parentPath, BAD_REQUEST.getStatusCode());
            return;
        }

        if (!canCreateUnderParent(elementType, parentPath)) {
            for (Blueprint b : blueprints) {
                String id = b.accept(idExtractor, null);
                putStatus(statuses, elementType, parentPath.extend(elementType.elementType, id).get(),
                        FORBIDDEN.getStatusCode());
            }
            return;
        }

        for (Blueprint b : blueprints) {
            String id = b.accept(idExtractor, null);
            CanonicalPath childPath = parentPath.extend(elementType.elementType, id).get();

            WriteInterface<?, ?, ?, ?> wrt =
                    step(parentPath.getSegment().getElementType(), elementType
                            .elementType, single);

            try {
                create(b, wrt);
                putStatus(statuses, elementType, childPath, CREATED.getStatusCode());
            } catch (EntityAlreadyExistsException ex) {
                putStatus(statuses, elementType, childPath, CONFLICT.getStatusCode());
            } catch (Exception ex) {
                // deliberately broad: one bad entity must not abort the rest of the batch
                RestApiLogger.LOGGER.failedToCreateBulkEntity(childPath, ex);
                putStatus(statuses, elementType, childPath, INTERNAL_SERVER_ERROR.getStatusCode());
            }
        }
    }

    /**
     * Creates relationships originating at {@code parentPath}. Because a
     * relationship has no client-supplied id, a synthetic id of the form
     * {@code <source><arrow><target>} is used when reporting 403/404 statuses.
     */
    private void bulkCreateRelationships(Map<ElementType, Map<CanonicalPath, Integer>> statuses,
                                         CanonicalPath parentPath, ResolvableToSingleWithRelationships<?, ?> single,
                                         ElementType elementType, List<Blueprint> blueprints) {

        if (!security.canAssociateFrom(parentPath)) {
            for (Blueprint b : blueprints) {
                Relationship.Blueprint rb = (Relationship.Blueprint) b;
                String id = parentPath.toString() + arrow(rb) + rb.getOtherEnd();
                putStatus(statuses, elementType, parentPath.extend(elementType.elementType, id).get(),
                        FORBIDDEN.getStatusCode());
            }
            return;
        }

        for (Blueprint b : blueprints) {
            Relationship.Blueprint rb = (Relationship.Blueprint) b;

            try {
                Relationships.Single rel = single.relationships(rb.getDirection())
                        .linkWith(rb.getName(), rb.getOtherEnd(), rb.getProperties());
                putStatus(statuses, elementType, rel.entity().getPath(), CREATED.getStatusCode());
            } catch (EntityNotFoundException ex) {
                // the other end of the relationship does not exist
                String fakeId = parentPath.toString() + arrow(rb) + rb.getOtherEnd().toString();
                putStatus(statuses, elementType, CanonicalPath.of().relationship(fakeId).get(),
                        NOT_FOUND.getStatusCode());
            }
        }
    }

    /**
     * Authorization check for creating an {@code elementType} under
     * {@code parentPath}. Data entities are governed by update permission on the
     * parent; relationships are handled elsewhere and are rejected outright.
     */
    private boolean canCreateUnderParent(ElementType elementType, CanonicalPath parentPath) {
        switch (elementType) {
            case dataEntity:
                return security.canUpdate(parentPath);
            case relationship:
                throw new IllegalArgumentException("Cannot create anything under a relationship.");
            default:
                return security.canCreate(elementType.elementType).under(parentPath);
        }
    }

    /**
     * The entity kinds accepted in bulk requests. Enum constant names double as
     * the JSON keys of the request payload; each constant carries the model
     * element class and its blueprint class for deserialization/dispatch.
     */
    private enum ElementType {
        environment(Environment.class, Environment.Blueprint.class),
        resourceType(ResourceType.class, ResourceType.Blueprint.class),
        metricType(MetricType.class, MetricType.Blueprint.class),
        operationType(OperationType.class, OperationType.Blueprint.class),
        feed(Feed.class, Feed.Blueprint.class),
        metric(Metric.class, Metric.Blueprint.class),
        resource(Resource.class, Resource.Blueprint.class),
        dataEntity(DataEntity.class, DataEntity.Blueprint.class),
        relationship(Relationship.class, Relationship.Blueprint.class);

        final Class<? extends AbstractElement<?, ?>> elementType;
        final Class<? extends Blueprint> blueprintType;

        ElementType(Class<? extends AbstractElement<?, ?>> elementType, Class<? extends Blueprint> blueprintType) {
            this.elementType = elementType;
            this.blueprintType = blueprintType;
        }
    }

    /**
     * Extracts the id under which a blueprint will be stored: the explicit id for
     * plain entities, the data role name for data entities, and a synthesized
     * arrow-notation id for relationships.
     */
    private static class IdExtractor extends ElementBlueprintVisitor.Simple<String, Void> {
        @Override
        protected String defaultAction(Object blueprint, Void parameter) {
            return ((Entity.Blueprint) blueprint).getId();
        }

        @Override
        public String visitData(DataEntity.Blueprint<?> data, Void parameter) {
            return data.getRole().name();
        }

        @Override
        public String visitRelationship(Relationship.Blueprint relationship, Void parameter) {
            return arrow(relationship) + relationship.getOtherEnd().toString();
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.wal;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.codehaus.jackson.map.ObjectMapper;
/**
* WALPrettyPrinter prints the contents of a given WAL with a variety of
* options affecting formatting and extent of content.
*
* It targets two usage cases: pretty printing for ease of debugging directly by
* humans, and JSON output for consumption by monitoring and/or maintenance
* scripts.
*
* It can filter by row, region, or sequence id.
*
* It can also toggle output of values.
*
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
@InterfaceStability.Evolving
public class WALPrettyPrinter {
private boolean outputValues;
private boolean outputJSON;
// The following enable filtering by sequence, region, and row, respectively
private long sequence;
private String region;
private String row;
// enable in order to output a single list of transactions from several files
private boolean persistentOutput;
private boolean firstTxn;
// useful for programmatic capture of JSON output
private PrintStream out;
// for JSON encoding
private static final ObjectMapper MAPPER = new ObjectMapper();
/**
* Basic constructor that simply initializes values to reasonable defaults.
*/
public WALPrettyPrinter() {
outputValues = false;
outputJSON = false;
sequence = -1;
region = null;
row = null;
persistentOutput = false;
firstTxn = true;
out = System.out;
}
/**
* Fully specified constructor.
*
* @param outputValues
* when true, enables output of values along with other log
* information
* @param outputJSON
* when true, enables output in JSON format rather than a
* "pretty string"
* @param sequence
* when nonnegative, serves as a filter; only log entries with this
* sequence id will be printed
* @param region
* when not null, serves as a filter; only log entries from this
* region will be printed
* @param row
* when not null, serves as a filter; only log entries from this row
* will be printed
* @param persistentOutput
* keeps a single list running for multiple files. if enabled, the
* endPersistentOutput() method must be used!
* @param out
* Specifies an alternative to stdout for the destination of this
* PrettyPrinter's output.
*/
public WALPrettyPrinter(boolean outputValues, boolean outputJSON,
long sequence, String region, String row, boolean persistentOutput,
PrintStream out) {
this.outputValues = outputValues;
this.outputJSON = outputJSON;
this.sequence = sequence;
this.region = region;
this.row = row;
this.persistentOutput = persistentOutput;
if (persistentOutput) {
beginPersistentOutput();
}
this.out = out;
this.firstTxn = true;
}
/**
* turns value output on
*/
public void enableValues() {
outputValues = true;
}
/**
* turns value output off
*/
public void disableValues() {
outputValues = false;
}
/**
* turns JSON output on
*/
public void enableJSON() {
outputJSON = true;
}
/**
* turns JSON output off, and turns on "pretty strings" for human consumption
*/
public void disableJSON() {
outputJSON = false;
}
/**
* sets the region by which output will be filtered
*
* @param sequence
* when nonnegative, serves as a filter; only log entries with this
* sequence id will be printed
*/
public void setSequenceFilter(long sequence) {
this.sequence = sequence;
}
/**
* sets the region by which output will be filtered
*
* @param region
* when not null, serves as a filter; only log entries from this
* region will be printed
*/
public void setRegionFilter(String region) {
this.region = region;
}
/**
* sets the region by which output will be filtered
*
* @param row
* when not null, serves as a filter; only log entries from this row
* will be printed
*/
public void setRowFilter(String row) {
this.row = row;
}
/**
* enables output as a single, persistent list. at present, only relevant in
* the case of JSON output.
*/
public void beginPersistentOutput() {
if (persistentOutput)
return;
persistentOutput = true;
firstTxn = true;
if (outputJSON)
out.print("[");
}
/**
* ends output of a single, persistent list. at present, only relevant in the
* case of JSON output.
*/
public void endPersistentOutput() {
if (!persistentOutput)
return;
persistentOutput = false;
if (outputJSON)
out.print("]");
}
/**
* reads a log file and outputs its contents, one transaction at a time, as
* specified by the currently configured options
*
* @param conf
* the HBase configuration relevant to this log file
* @param p
* the path of the log file to be read
* @throws IOException
* may be unable to access the configured filesystem or requested
* file.
*/
public void processFile(final Configuration conf, final Path p)
throws IOException {
FileSystem fs = FileSystem.get(conf);
if (!fs.exists(p)) {
throw new FileNotFoundException(p.toString());
}
if (!fs.isFile(p)) {
throw new IOException(p + " is not a file");
}
WAL.Reader log = WALFactory.createReader(fs, p, conf);
if (log instanceof ProtobufLogReader) {
List<String> writerClsNames = ((ProtobufLogReader) log).getWriterClsNames();
if (writerClsNames != null && writerClsNames.size() > 0) {
out.print("Writer Classes: ");
for (int i = 0; i < writerClsNames.size(); i++) {
out.print(writerClsNames.get(i));
if (i != writerClsNames.size() - 1) {
out.print(" ");
}
}
out.println();
}
String cellCodecClsName = ((ProtobufLogReader) log).getCodecClsName();
if (cellCodecClsName != null) {
out.println("Cell Codec Class: " + cellCodecClsName);
}
}
if (outputJSON && !persistentOutput) {
out.print("[");
firstTxn = true;
}
try {
WAL.Entry entry;
while ((entry = log.next()) != null) {
WALKey key = entry.getKey();
WALEdit edit = entry.getEdit();
// begin building a transaction structure
Map<String, Object> txn = key.toStringMap();
long writeTime = key.getWriteTime();
// check output filters
if (sequence >= 0 && ((Long) txn.get("sequence")) != sequence)
continue;
if (region != null && !((String) txn.get("region")).equals(region))
continue;
// initialize list into which we will store atomic actions
List<Map> actions = new ArrayList<Map>();
for (Cell cell : edit.getCells()) {
// add atomic operation to txn
Map<String, Object> op = new HashMap<String, Object>(toStringMap(cell));
if (outputValues) op.put("value", Bytes.toStringBinary(CellUtil.cloneValue(cell)));
// check row output filter
if (row == null || ((String) op.get("row")).equals(row)) {
actions.add(op);
}
}
if (actions.size() == 0)
continue;
txn.put("actions", actions);
if (outputJSON) {
// JSON output is a straightforward "toString" on the txn object
if (firstTxn)
firstTxn = false;
else
out.print(",");
// encode and print JSON
out.print(MAPPER.writeValueAsString(txn));
} else {
// Pretty output, complete with indentation by atomic action
out.println("Sequence=" + txn.get("sequence") + " "
+ ", region=" + txn.get("region") + " at write timestamp=" + new Date(writeTime));
for (int i = 0; i < actions.size(); i++) {
Map op = actions.get(i);
out.println("row=" + op.get("row") +
", column=" + op.get("family") + ":" + op.get("qualifier"));
if (op.get("tag") != null) {
out.println(" tag: " + op.get("tag"));
}
if (outputValues) out.println(" value: " + op.get("value"));
}
}
}
} finally {
log.close();
}
if (outputJSON && !persistentOutput) {
out.print("]");
}
}
private static Map<String, Object> toStringMap(Cell cell) {
Map<String, Object> stringMap = new HashMap<String, Object>();
stringMap.put("row",
Bytes.toStringBinary(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
stringMap.put("family", Bytes.toStringBinary(cell.getFamilyArray(), cell.getFamilyOffset(),
cell.getFamilyLength()));
stringMap.put("qualifier",
Bytes.toStringBinary(cell.getQualifierArray(), cell.getQualifierOffset(),
cell.getQualifierLength()));
stringMap.put("timestamp", cell.getTimestamp());
stringMap.put("vlen", cell.getValueLength());
if (cell.getTagsLength() > 0) {
List<String> tagsString = new ArrayList<String>();
Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
while (tagsIterator.hasNext()) {
Tag tag = tagsIterator.next();
tagsString.add((tag.getType()) + ":"
+ Bytes.toStringBinary(tag.getBuffer(), tag.getTagOffset(), tag.getTagLength()));
}
stringMap.put("tag", tagsString);
}
return stringMap;
}
public static void main(String[] args) throws IOException {
run(args);
}
/**
 * Pass one or more log file names and formatting options and it will dump out
 * a text version of the contents on <code>stdout</code>.
 *
 * @param args
 *          Command line arguments
 * @throws IOException
 *          Thrown upon file system errors etc.
 */
public static void run(String[] args) throws IOException {
  // create options
  Options options = new Options();
  options.addOption("h", "help", false, "Output help message");
  options.addOption("j", "json", false, "Output JSON");
  options.addOption("p", "printvals", false, "Print values");
  options.addOption("r", "region", true,
      "Region to filter by. Pass encoded region name; e.g. '9192caead6a5a20acb4454ffbc79fa14'");
  options.addOption("s", "sequence", true,
      "Sequence to filter by. Pass sequence number.");
  options.addOption("w", "row", true, "Row to filter by. Pass row name.");

  WALPrettyPrinter printer = new WALPrettyPrinter();
  CommandLineParser parser = new PosixParser();
  List<?> files = null;
  try {
    CommandLine cmd = parser.parse(options, args);
    files = cmd.getArgList();
    // No files or explicit -h: print usage and bail out.
    if (files.size() == 0 || cmd.hasOption("h")) {
      HelpFormatter formatter = new HelpFormatter();
      formatter.printHelp("WAL <filename...>", options, true);
      System.exit(-1);
    }

    // configure the pretty printer using command line options
    if (cmd.hasOption("p"))
      printer.enableValues();
    if (cmd.hasOption("j"))
      printer.enableJSON();
    if (cmd.hasOption("r"))
      printer.setRegionFilter(cmd.getOptionValue("r"));
    if (cmd.hasOption("s"))
      printer.setSequenceFilter(Long.parseLong(cmd.getOptionValue("s")));
    if (cmd.hasOption("w"))
      printer.setRowFilter(cmd.getOptionValue("w"));
  } catch (ParseException e) {
    e.printStackTrace();
    HelpFormatter formatter = new HelpFormatter();
    // Fixed: the usage string previously said "HFile filename(s)" (a
    // copy-paste from HFilePrettyPrinter); this is the WAL tool.
    formatter.printHelp("WAL <filename...>", options, true);
    System.exit(-1);
  }

  // get configuration, file system, and process the given files
  Configuration conf = HBaseConfiguration.create();
  FSUtils.setFsDefault(conf, FSUtils.getRootDir(conf));

  // begin output
  printer.beginPersistentOutput();
  for (Object f : files) {
    Path file = new Path((String) f);
    FileSystem fs = file.getFileSystem(conf);
    if (!fs.exists(file)) {
      System.err.println("ERROR, file doesn't exist: " + file);
      return;
    }
    printer.processFile(conf, file);
  }
  printer.endPersistentOutput();
}
}
| |
/*
* Copyright 2015-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.codec.impl;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
import java.util.EnumMap;
import com.fasterxml.jackson.databind.JsonNode;
import org.hamcrest.MatcherAssert;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.onlab.packet.Ip6Address;
import org.onlab.packet.IpPrefix;
import org.onlab.packet.MacAddress;
import org.onlab.packet.MplsLabel;
import org.onlab.packet.TpPort;
import org.onlab.packet.VlanId;
import org.onosproject.codec.CodecContext;
import org.onosproject.codec.JsonCodec;
import org.onosproject.net.ChannelSpacing;
import org.onosproject.net.GridType;
import org.onosproject.net.Lambda;
import org.onosproject.net.OchSignalType;
import org.onosproject.net.OduSignalId;
import org.onosproject.net.OduSignalType;
import org.onosproject.net.PortNumber;
import org.onosproject.net.flow.criteria.Criteria;
import org.onosproject.net.flow.criteria.Criterion;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.onosproject.net.flow.criteria.PiCriterion;
import org.onosproject.net.pi.model.PiMatchFieldId;
import org.onosproject.net.pi.runtime.PiExactFieldMatch;
import org.onosproject.net.pi.runtime.PiFieldMatch;
import org.onosproject.net.pi.runtime.PiLpmFieldMatch;
import org.onosproject.net.pi.runtime.PiRangeFieldMatch;
import org.onosproject.net.pi.runtime.PiTernaryFieldMatch;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.onlab.junit.TestUtils.getField;
import static org.onlab.util.ImmutableByteSequence.copyFrom;
import static org.onosproject.codec.impl.CriterionJsonMatcher.matchesCriterion;
/**
 * Unit tests for criterion codec.
 */
public class CriterionCodecTest {

    CodecContext context;
    JsonCodec<Criterion> criterionCodec;

    // Shared fixture values reused by the individual match tests below.
    final PortNumber port = PortNumber.portNumber(1);
    final IpPrefix ipPrefix4 = IpPrefix.valueOf("10.1.1.0/24");
    final IpPrefix ipPrefix6 = IpPrefix.valueOf("fe80::/64");
    final MacAddress mac1 = MacAddress.valueOf("00:00:11:00:00:01");
    final MacAddress mcastMac = MacAddress.valueOf("01:00:5E:00:00:01");
    final MacAddress mcastMacMask = MacAddress.valueOf("FF:FF:FF:80:00:00");
    final TpPort tpPort = TpPort.tpPort(40000);
    final int tributaryPortNumber = 11;
    final int tributarySlotLen = 80;
    final byte[] tributarySlotBitmap = new byte[] {1, 2, 3, 4, 2, 3, 4, 2, 3, 4};

    /**
     * Sets up for each test. Creates a context and fetches the criterion
     * codec.
     */
    @Before
    public void setUp() {
        context = new MockCodecContext();
        criterionCodec = context.codec(Criterion.class);
        assertThat(criterionCodec, notNullValue());
    }

    /**
     * Checks that all criterion types are covered by the codec.
     */
    @Test
    public void checkCriterionTypes() throws Exception {
        EncodeCriterionCodecHelper encoder = new EncodeCriterionCodecHelper(
                Criteria.dummy(), context);
        // The encoder's private formatMap must have an entry per criterion type.
        EnumMap<Criterion.Type, Object> formatMap =
                getField(encoder, "formatMap");
        assertThat(formatMap, notNullValue());

        for (Criterion.Type type : Criterion.Type.values()) {
            assertThat("Entry not found for " + type.toString(),
                    formatMap.get(type), notNullValue());
        }
    }

    /**
     * Tests in port criterion.
     */
    @Test
    public void matchInPortTest() {
        Criterion criterion = Criteria.matchInPort(port);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests in physical port criterion.
     */
    @Test
    public void matchInPhyPortTest() {
        Criterion criterion = Criteria.matchInPhyPort(port);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests metadata criterion.
     */
    @Test
    public void matchMetadataTest() {
        Criterion criterion = Criteria.matchMetadata(0xabcdL);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests ethernet destination criterion.
     */
    @Test
    public void matchEthDstTest() {
        Criterion criterion = Criteria.matchEthDst(mac1);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests masked ethernet destination criterion (Criterion.Type.ETH_DST_MASKED).
     */
    @Test
    public void matchEthDstMaskTest() {
        Criterion criterion = Criteria.matchEthDstMasked(mcastMac, mcastMacMask);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests ethernet source criterion.
     */
    @Test
    public void matchEthSrcTest() {
        Criterion criterion = Criteria.matchEthSrc(mac1);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests ethernet type criterion.
     */
    @Test
    public void matchEthTypeTest() {
        Criterion criterion = Criteria.matchEthType((short) 0x8844);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests VLAN Id criterion.
     */
    @Test
    public void matchVlanIdTest() {
        Criterion criterion = Criteria.matchVlanId(VlanId.ANY);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests VLAN PCP criterion.
     */
    @Test
    public void matchVlanPcpTest() {
        Criterion criterion = Criteria.matchVlanPcp((byte) 7);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests IP DSCP criterion.
     */
    @Test
    public void matchIPDscpTest() {
        Criterion criterion = Criteria.matchIPDscp((byte) 63);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests IP ECN criterion.
     */
    @Test
    public void matchIPEcnTest() {
        Criterion criterion = Criteria.matchIPEcn((byte) 3);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests IP protocol criterion.
     */
    @Test
    public void matchIPProtocolTest() {
        Criterion criterion = Criteria.matchIPProtocol((byte) 250);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests IP source criterion.
     */
    @Test
    public void matchIPSrcTest() {
        Criterion criterion = Criteria.matchIPSrc(ipPrefix4);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests IP destination criterion.
     */
    @Test
    public void matchIPDstTest() {
        Criterion criterion = Criteria.matchIPDst(ipPrefix4);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests source TCP port criterion.
     */
    @Test
    public void matchTcpSrcTest() {
        Criterion criterion = Criteria.matchTcpSrc(tpPort);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests destination TCP port criterion.
     */
    @Test
    public void matchTcpDstTest() {
        Criterion criterion = Criteria.matchTcpDst(tpPort);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests source UDP port criterion.
     */
    @Test
    public void matchUdpSrcTest() {
        Criterion criterion = Criteria.matchUdpSrc(tpPort);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests destination UDP criterion.
     */
    @Test
    public void matchUdpDstTest() {
        Criterion criterion = Criteria.matchUdpDst(tpPort);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests source SCTP criterion.
     */
    @Test
    public void matchSctpSrcTest() {
        Criterion criterion = Criteria.matchSctpSrc(tpPort);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests destination SCTP criterion.
     */
    @Test
    public void matchSctpDstTest() {
        Criterion criterion = Criteria.matchSctpDst(tpPort);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests ICMP type criterion.
     */
    @Test
    public void matchIcmpTypeTest() {
        Criterion criterion = Criteria.matchIcmpType((byte) 250);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests ICMP code criterion.
     */
    @Test
    public void matchIcmpCodeTest() {
        Criterion criterion = Criteria.matchIcmpCode((byte) 250);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests IPv6 source criterion.
     */
    @Test
    public void matchIPv6SrcTest() {
        Criterion criterion = Criteria.matchIPv6Src(ipPrefix6);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests IPv6 destination criterion.
     */
    @Test
    public void matchIPv6DstTest() {
        Criterion criterion = Criteria.matchIPv6Dst(ipPrefix6);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests IPv6 flow label criterion.
     */
    @Test
    public void matchIPv6FlowLabelTest() {
        Criterion criterion = Criteria.matchIPv6FlowLabel(0xffffe);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests ICMP v6 type criterion.
     */
    @Test
    public void matchIcmpv6TypeTest() {
        Criterion criterion = Criteria.matchIcmpv6Type((byte) 250);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests ICMP v6 code criterion.
     */
    @Test
    public void matchIcmpv6CodeTest() {
        Criterion criterion = Criteria.matchIcmpv6Code((byte) 250);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests IPV6 target address criterion.
     */
    @Test
    public void matchIPv6NDTargetAddressTest() {
        Criterion criterion =
                Criteria.matchIPv6NDTargetAddress(
                        Ip6Address.valueOf("1111:2222::"));
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests IPV6 SLL criterion.
     */
    @Test
    public void matchIPv6NDSourceLinkLayerAddressTest() {
        Criterion criterion = Criteria.matchIPv6NDSourceLinkLayerAddress(mac1);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests IPV6 TLL criterion.
     */
    @Test
    public void matchIPv6NDTargetLinkLayerAddressTest() {
        Criterion criterion = Criteria.matchIPv6NDTargetLinkLayerAddress(mac1);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests MPLS label criterion.
     */
    @Test
    public void matchMplsLabelTest() {
        Criterion criterion = Criteria.matchMplsLabel(MplsLabel.mplsLabel(0xffffe));
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests IPv6 Extension Header pseudo-field flags criterion.
     */
    @Test
    public void matchIPv6ExthdrFlagsTest() {
        // Combine every defined pseudo-header flag into one bitmask.
        int exthdrFlags =
                Criterion.IPv6ExthdrFlags.NONEXT.getValue() |
                Criterion.IPv6ExthdrFlags.ESP.getValue() |
                Criterion.IPv6ExthdrFlags.AUTH.getValue() |
                Criterion.IPv6ExthdrFlags.DEST.getValue() |
                Criterion.IPv6ExthdrFlags.FRAG.getValue() |
                Criterion.IPv6ExthdrFlags.ROUTER.getValue() |
                Criterion.IPv6ExthdrFlags.HOP.getValue() |
                Criterion.IPv6ExthdrFlags.UNREP.getValue() |
                Criterion.IPv6ExthdrFlags.UNSEQ.getValue();
        Criterion criterion = Criteria.matchIPv6ExthdrFlags(exthdrFlags);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests lambda criterion.
     */
    @Test
    public void matchOchSignal() {
        Lambda ochSignal = Lambda.ochSignal(GridType.DWDM, ChannelSpacing.CHL_100GHZ, 4, 8);
        Criterion criterion = Criteria.matchLambda(ochSignal);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests Och signal type criterion.
     */
    @Test
    public void matchOchSignalTypeTest() {
        Criterion criterion = Criteria.matchOchSignalType(OchSignalType.FIXED_GRID);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests Odu Signal ID criterion.
     */
    @Test
    public void matchOduSignalIdTest() {
        OduSignalId oduSignalId = OduSignalId.oduSignalId(tributaryPortNumber, tributarySlotLen, tributarySlotBitmap);
        Criterion criterion = Criteria.matchOduSignalId(oduSignalId);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests Odu Signal Type criterion.
     */
    @Test
    public void matchOduSignalTypeTest() {
        OduSignalType signalType = OduSignalType.ODU2;
        Criterion criterion = Criteria.matchOduSignalType(signalType);
        ObjectNode result = criterionCodec.encode(criterion, context);
        assertThat(result, matchesCriterion(criterion));
    }

    /**
     * Tests protocol-independent type criterion encoding.
     */
    @Test
    public void matchPiTypeEncodingTest() {
        // Exact match field.
        PiMatchFieldId ethMatchFieldId = PiMatchFieldId.of("ethernet_t.etherType");
        byte[] matchExactBytes1 = {0x08, 0x00};
        Criterion exactBytesCriterion = PiCriterion.builder().matchExact(ethMatchFieldId, matchExactBytes1).build();
        ObjectNode exactResult = criterionCodec.encode(exactBytesCriterion, context);
        assertThat(exactResult, matchesCriterion(exactBytesCriterion));

        // Longest-prefix match field.
        PiMatchFieldId ipv4MatchFieldId = PiMatchFieldId.of("ipv4_t.dstAddr");
        int mask = 0x00ffffff;
        byte[] matchLpmBytes1 = {0x0a, 0x01, 0x01, 0x01};
        Criterion lpmBytesCriterion = PiCriterion.builder().matchLpm(ipv4MatchFieldId, matchLpmBytes1, mask).build();
        ObjectNode lpmResult = criterionCodec.encode(lpmBytesCriterion, context);
        assertThat(lpmResult, matchesCriterion(lpmBytesCriterion));

        // Ternary match field.
        byte[] matchTernaryBytes1 = {0x0a, 0x01, 0x01, 0x01};
        byte[] matchTernaryMaskBytes = {0x7f, 0x7f, 0x7f, 0x00};
        Criterion ternaryBytesCriterion = PiCriterion.builder().matchTernary(ipv4MatchFieldId, matchTernaryBytes1,
                matchTernaryMaskBytes).build();
        ObjectNode ternaryResult = criterionCodec.encode(ternaryBytesCriterion, context);
        assertThat(ternaryResult, matchesCriterion(ternaryBytesCriterion));

        // Range match field.
        byte[] matchRangeBytes1 = {0x10};
        byte[] matchRangeHighBytes = {0x30};
        Criterion rangeBytesCriterion = PiCriterion.builder()
                .matchRange(ipv4MatchFieldId, matchRangeBytes1, matchRangeHighBytes).build();
        ObjectNode rangeResult = criterionCodec.encode(rangeBytesCriterion, context);
        assertThat(rangeResult, matchesCriterion(rangeBytesCriterion));
    }

    /**
     * Tests protocol-independent type criterion decoding.
     */
    @Test
    public void matchPiTypeDecodingTest() throws IOException {
        Criterion criterion = getCriterion("PiCriterion.json");
        assertThat(criterion.type(), is(Criterion.Type.PROTOCOL_INDEPENDENT));
        Collection<PiFieldMatch> piFieldMatches = ((PiCriterion) criterion).fieldMatches();
        for (PiFieldMatch piFieldMatch : piFieldMatches) {
            switch (piFieldMatch.type()) {
                case EXACT:
                    assertThat(piFieldMatch.fieldId().id(), is("ingress_port"));
                    assertThat(((PiExactFieldMatch) piFieldMatch).value(), is(
                            copyFrom((byte) 0x10)));
                    break;
                case LPM:
                    assertThat(piFieldMatch.fieldId().id(), is("src_addr"));
                    assertThat(((PiLpmFieldMatch) piFieldMatch).value(),
                               is(copyFrom(0xa010101)));
                    assertThat(((PiLpmFieldMatch) piFieldMatch).prefixLength(), is(24));
                    break;
                case TERNARY:
                    assertThat(piFieldMatch.fieldId().id(), is("dst_addr"));
                    assertThat(((PiTernaryFieldMatch) piFieldMatch).value(),
                               is(copyFrom(0xa010101)));
                    assertThat(((PiTernaryFieldMatch) piFieldMatch).mask(),
                               is(copyFrom(0xfffffff)));
                    break;
                case RANGE:
                    assertThat(piFieldMatch.fieldId().id(), is("egress_port"));
                    assertThat(((PiRangeFieldMatch) piFieldMatch).highValue(),
                               is(copyFrom((byte) 0x20)));
                    assertThat(((PiRangeFieldMatch) piFieldMatch).lowValue(),
                               is(copyFrom((byte) 0x10)));
                    break;
                default:
                    // Any other field-match type in the fixture is a test failure.
                    Assert.fail();
            }
        }
    }

    /**
     * Reads in a criterion from the given resource and decodes it.
     *
     * @param resourceName resource to use to read the JSON for the rule
     * @return decoded criterion
     * @throws IOException if processing the resource fails
     */
    private Criterion getCriterion(String resourceName) throws IOException {
        // try-with-resources so the resource stream is always closed.
        try (InputStream jsonStream = CriterionCodecTest.class.getResourceAsStream(resourceName)) {
            JsonNode json = context.mapper().readTree(jsonStream);
            assertThat(json, notNullValue());
            Criterion criterion = criterionCodec.decode((ObjectNode) json, context);
            assertThat(criterion, notNullValue());
            return criterion;
        }
    }
}
| |
/*
* Copyright 2015 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.server;
import static java.util.Objects.requireNonNull;
import java.net.IDN;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.function.Function;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import com.github.kristofa.brave.internal.Nullable;
import com.linecorp.armeria.common.Request;
import com.linecorp.armeria.common.Response;
import io.netty.handler.ssl.SslContext;
import io.netty.util.DomainNameMapping;
import io.netty.util.DomainNameMappingBuilder;
/**
 * A <a href="https://en.wikipedia.org/wiki/Virtual_hosting#Name-based">name-based virtual host</a>.
 * A {@link VirtualHost} contains the following information:
 * <ul>
 *   <li>the hostname pattern, as defined in
 *       <a href="http://tools.ietf.org/html/rfc2818#section-3.1">the section 3.1 of RFC2818</a></li>
 *   <li>{@link SslContext} if TLS is enabled</li>
 *   <li>the list of available {@link Service}s and their {@link PathMapping}s</li>
 * </ul>
 *
 * @see VirtualHostBuilder
 */
public final class VirtualHost {

    /** Valid hostname (or hostname-pattern suffix) characters after IDNA conversion. */
    private static final Pattern HOSTNAME_PATTERN = Pattern.compile(
            "^(?:[-_a-zA-Z0-9]|[-_a-zA-Z0-9][-_.a-zA-Z0-9]*[-_a-zA-Z0-9])$");

    /**
     * Initialized later by {@link ServerConfig} via {@link #setServerConfig(ServerConfig)}.
     */
    private ServerConfig serverConfig;

    private final String defaultHostname;
    private final String hostnamePattern;
    private final SslContext sslContext;
    private final List<ServiceConfig> services;
    // Frozen in the constructor; maps a request path to its ServiceConfig.
    private final PathMappings<ServiceConfig> serviceMapping = new PathMappings<>();
    // Lazily-built cache for toString(); see toString() below.
    private String strVal;

    /**
     * Creates a new virtual host. The hostname arguments are normalized (IDNA ASCII
     * conversion and lower-casing) and validated before being stored, and the pattern
     * is checked to actually match the default hostname.
     */
    VirtualHost(String defaultHostname, String hostnamePattern,
                SslContext sslContext, Iterable<ServiceConfig> serviceConfigs) {

        // Normalize first so that the validation below sees the canonical forms.
        defaultHostname = normalizeDefaultHostname(defaultHostname);
        hostnamePattern = normalizeHostnamePattern(hostnamePattern);
        ensureHostnamePatternMatchesDefaultHostname(hostnamePattern, defaultHostname);

        this.defaultHostname = defaultHostname;
        this.hostnamePattern = hostnamePattern;
        this.sslContext = validateSslContext(sslContext);

        requireNonNull(serviceConfigs, "serviceConfigs");

        // Bind each ServiceConfig to this host and register its path mapping.
        final List<ServiceConfig> servicesCopy = new ArrayList<>();

        for (ServiceConfig c : serviceConfigs) {
            c = c.build(this);
            servicesCopy.add(c);
            serviceMapping.add(c.pathMapping(), c);
        }

        services = Collections.unmodifiableList(servicesCopy);
        serviceMapping.freeze();
    }

    /**
     * IDNA ASCII conversion, case normalization and validation.
     */
    static String normalizeDefaultHostname(String defaultHostname) {
        requireNonNull(defaultHostname, "defaultHostname");
        if (needsNormalization(defaultHostname)) {
            defaultHostname = IDN.toASCII(defaultHostname, IDN.ALLOW_UNASSIGNED);
        }

        if (!HOSTNAME_PATTERN.matcher(defaultHostname).matches()) {
            throw new IllegalArgumentException("defaultHostname: " + defaultHostname);
        }

        return defaultHostname.toLowerCase(Locale.ENGLISH);
    }

    /**
     * IDNA ASCII conversion, case normalization and validation.
     */
    static String normalizeHostnamePattern(String hostnamePattern) {
        requireNonNull(hostnamePattern, "hostnamePattern");
        if (needsNormalization(hostnamePattern)) {
            hostnamePattern = IDN.toASCII(hostnamePattern, IDN.ALLOW_UNASSIGNED);
        }

        // "*" (match-all) is allowed as-is; a leading "*." wildcard is stripped
        // before validating the remaining suffix against HOSTNAME_PATTERN.
        if (!"*".equals(hostnamePattern) &&
            !HOSTNAME_PATTERN.matcher(hostnamePattern.startsWith("*.") ? hostnamePattern.substring(2)
                                                                       : hostnamePattern).matches()) {
            throw new IllegalArgumentException("hostnamePattern: " + hostnamePattern);
        }

        return hostnamePattern.toLowerCase(Locale.ENGLISH);
    }

    /**
     * Ensure that 'hostnamePattern' matches 'defaultHostname'.
     */
    static void ensureHostnamePatternMatchesDefaultHostname(String hostnamePattern, String defaultHostname) {
        if ("*".equals(hostnamePattern)) {
            return;
        }

        // Pretty convoluted way to validate but it's done only once and
        // we don't need to duplicate the pattern matching logic.
        final DomainNameMapping<Boolean> mapping =
                new DomainNameMappingBuilder<>(Boolean.FALSE).add(hostnamePattern, Boolean.TRUE).build();

        if (!mapping.map(defaultHostname)) {
            throw new IllegalArgumentException(
                    "defaultHostname: " + defaultHostname +
                    " (must be matched by hostnamePattern: " + hostnamePattern + ')');
        }
    }

    /**
     * Returns whether the hostname contains a non-ASCII character (> 0x7F) and thus
     * requires IDNA conversion before validation.
     */
    private static boolean needsNormalization(String hostnamePattern) {
        final int length = hostnamePattern.length();
        for (int i = 0; i < length; i++) {
            int c = hostnamePattern.charAt(i);
            if (c > 0x7F) {
                return true;
            }
        }
        return false;
    }

    /**
     * Validates that the given {@link SslContext}, if non-null, is a server-side context.
     */
    static SslContext validateSslContext(SslContext sslContext) {
        if (sslContext != null && !sslContext.isServer()) {
            throw new IllegalArgumentException("sslContext: " + sslContext + " (expected: server context)");
        }
        return sslContext;
    }

    /**
     * Returns the {@link Server} where this {@link VirtualHost} belongs to.
     */
    public Server server() {
        if (serverConfig == null) {
            throw new IllegalStateException("server is not configured yet.");
        }
        return serverConfig.server();
    }

    /**
     * Associates this host with its {@link ServerConfig}. May be invoked only once;
     * a second invocation means the host was added to more than one {@link Server}.
     */
    void setServerConfig(ServerConfig serverConfig) {
        if (this.serverConfig != null) {
            throw new IllegalStateException("VirtualHost cannot be added to more than one Server.");
        }

        this.serverConfig = requireNonNull(serverConfig, "serverConfig");
    }

    /**
     * Returns the default hostname of this virtual host.
     */
    public String defaultHostname() {
        return defaultHostname;
    }

    /**
     * Returns the hostname pattern of this virtual host, as defined in
     * <a href="http://tools.ietf.org/html/rfc2818#section-3.1">the section 3.1 of RFC2818</a>.
     */
    public String hostnamePattern() {
        return hostnamePattern;
    }

    /**
     * Returns the {@link SslContext} of this virtual host.
     */
    public SslContext sslContext() {
        return sslContext;
    }

    /**
     * Returns the information about the {@link Service}s bound to this virtual host.
     */
    public List<ServiceConfig> serviceConfigs() {
        return services;
    }

    /**
     * Finds the {@link Service} whose {@link PathMapping} matches the {@code path}.
     *
     * @return the {@link Service} wrapped by {@link PathMapped} if there's a match.
     *         {@link PathMapped#empty()} if there's no match.
     */
    public PathMapped<ServiceConfig> findServiceConfig(String path) {
        return serviceMapping.apply(path);
    }

    /**
     * Returns a new {@link VirtualHost} whose {@link Service}s are decorated with the
     * specified {@code decorator}, or {@code this} unchanged if {@code decorator} is null.
     */
    VirtualHost decorate(@Nullable Function<Service<Request, Response>, Service<Request, Response>> decorator) {
        if (decorator == null) {
            return this;
        }

        // Rebuild every ServiceConfig around the decorated service.
        final List<ServiceConfig> services =
                this.services.stream().map(cfg -> {
                    final PathMapping pathMapping = cfg.pathMapping();
                    final Service<Request, Response> service = decorator.apply(cfg.service());
                    final String loggerName = cfg.loggerNameWithoutPrefix();
                    return new ServiceConfig(pathMapping, service, loggerName);
                }).collect(Collectors.toList());

        return new VirtualHost(defaultHostname(), hostnamePattern(), sslContext(), services);
    }

    @Override
    public String toString() {
        // Lazily cached; a benign race may compute the value more than once,
        // but the result is always the same.
        String strVal = this.strVal;
        if (strVal == null) {
            this.strVal = strVal = toString(
                    getClass(), defaultHostname(), hostnamePattern(), sslContext(), serviceConfigs());
        }

        return strVal;
    }

    /**
     * Builds the string representation shared by {@link VirtualHost} and related types.
     */
    static String toString(Class<?> type, String defaultHostname, String hostnamePattern,
                           SslContext sslContext, List<?> services) {

        StringBuilder buf = new StringBuilder();
        if (type != null) {
            buf.append(type.getSimpleName());
        }

        buf.append('(');
        buf.append(defaultHostname);
        buf.append('/');
        buf.append(hostnamePattern);
        buf.append(", ssl: ");
        buf.append(sslContext != null);
        buf.append(", services: ");
        buf.append(services);
        buf.append(')');

        return buf.toString();
    }
}
| |
/*
*******************************************************************************
* L O G I T A G S
* Software and Programming
* Dr. Wolfgang Winter
* Germany
*
* All rights reserved
*
* Copyright 2016 Dr. Wolfgang Winter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************
*/
package com.logitags.cibet.sensor.pojo;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
import javax.persistence.Transient;
import javax.script.ScriptEngine;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.logitags.cibet.actuator.dc.ResourceApplyException;
import com.logitags.cibet.core.CibetException;
import com.logitags.cibet.core.ControlEvent;
import com.logitags.cibet.resource.ParameterSequenceComparator;
import com.logitags.cibet.resource.ParameterType;
import com.logitags.cibet.resource.Resource;
import com.logitags.cibet.resource.ResourceParameter;
import com.logitags.cibet.sensor.common.Invoker;
@Entity
@DiscriminatorValue(value = "MethodResource")
public class MethodResource extends Resource {
/**
*
*/
private static final long serialVersionUID = 1L;
private static Log log = LogFactory.getLog(MethodResource.class);
/**
* the Invoker implementation class that is capable to provide an instance of the object class for executing the
* controlled event on it.
*/
private String invokerClass;
/**
* optional JNDI name of EJB or factory constructor
*/
private String invokerParam;
/**
* method name which is controlled
*/
private String method;
/**
* In case the Resource represents a method invocation, this is the Method reflection object.
*/
@Transient
private transient Method methodObject;
public MethodResource() {
}
/**
* constructor used for EJB and POJO resources
*
* @param invokedObject
* @param m
* @param params
*/
public MethodResource(Object invokedObject, Method m, Set<ResourceParameter> params) {
setMethodObject(m);
unencodedTargetObject = invokedObject;
if (params != null) {
setParameters(params);
}
resolveTarget(invokedObject);
}
/**
* copy constructor
*
* @param copy
*/
public MethodResource(MethodResource copy) {
super(copy);
setInvokerClass(copy.invokerClass);
setInvokerParam(copy.invokerParam);
methodObject = copy.getMethodObject();
setMethod(copy.getMethod());
}
/**
* concatenates the values for creating the checkSum.
*/
public String createCheckSumString() {
StringBuffer b = new StringBuffer(super.createCheckSumString());
b.append(invokerParam == null ? "" : invokerParam);
b.append(method == null ? "" : method);
return b.toString();
}
@Override
public void fillContext(ScriptEngine engine) {
engine.put("$TARGET", getTarget());
engine.put("$TARGETOBJECT", getUnencodedTargetObject());
for (ResourceParameter param : getParameters()) {
if (param.getParameterType() == ParameterType.METHOD_PARAMETER) {
engine.put("$" + param.getName(), param.getUnencodedValue());
}
}
}
@Override
public Map<String, Object> getNotificationAttributes() {
Map<String, Object> map = new HashMap<>();
map.put("target", getTarget());
map.put("method", getMethod());
map.put("resultObject", getResultObject());
return map;
}
@Override
public Object apply(ControlEvent event) throws ResourceApplyException {
try {
Set<ResourceParameter> paramList = new TreeSet<ResourceParameter>(new ParameterSequenceComparator());
paramList.addAll(getParameters());
Class<? extends Invoker> facClass = (Class<? extends Invoker>) Class.forName(getInvokerClass());
Method createMethod = facClass.getMethod("createInstance");
Invoker fac = (Invoker) createMethod.invoke(null);
return fac.execute(getInvokerParam(), getTarget(), getMethod(), paramList);
} catch (CibetException e) {
throw e;
} catch (InvocationTargetException e) {
Throwable cause = e.getCause();
log.debug("cause=" + cause);
while (cause != null) {
if (cause instanceof CibetException) {
throw (CibetException) cause;
}
cause = cause.getCause();
}
// check EJBException cause but do not refer to EJBException
cause = e.getCause();
if (cause != null) {
Throwable causedBy = null;
try {
Method m = cause.getClass().getMethod("getCausedByException");
causedBy = (Throwable) m.invoke(cause);
} catch (Exception e1) {
log.debug("method getCausedByException() does not exist: " + e1.getMessage());
}
if (causedBy != null && causedBy instanceof CibetException) {
throw (CibetException) causedBy;
}
}
log.error(e.getMessage());
throw new ResourceApplyException("Apply of Method Invocation failed:\n" + toString(), e);
} catch (Exception e) {
log.error(e.getMessage());
throw new ResourceApplyException("Apply of Method Invocation failed:\n" + toString(), e);
}
}
/**
 * Returns the name of the Invoker implementation class that can provide an instance of
 * the object class for executing the controlled event on it.
 *
 * @return the invokerClass
 */
public String getInvokerClass() {
    return this.invokerClass;
}
/**
 * Sets the name of the Invoker implementation class that can provide an instance of the
 * object class for executing the controlled event on it.
 *
 * @param invokerClass
 *           the invokerClass to set
 */
public void setInvokerClass(String invokerClass) {
    this.invokerClass = invokerClass;
}
/**
 * Returns the optional JNDI name of the EJB or the factory constructor argument.
 *
 * @return the invokerParam
 */
public String getInvokerParam() {
    return this.invokerParam;
}
/**
 * Sets the optional JNDI name of the EJB or the factory constructor argument.
 *
 * @param invokerParam
 *           the invokerParam to set
 */
public void setInvokerParam(String invokerParam) {
    this.invokerParam = invokerParam;
}
/**
 * Returns the name of the method that is under control.
 *
 * @return the method
 */
public String getMethod() {
    return this.method;
}
/**
 * Sets the name of the method that is under control.
 *
 * @param method
 *           the method to set
 */
public void setMethod(String method) {
    this.method = method;
}
/**
 * Returns the {@link Method} reflection object in case this Resource represents a method
 * invocation.
 *
 * @return the methodObject
 */
public Method getMethodObject() {
    return this.methodObject;
}
/**
 * Sets the {@link Method} reflection object in case this Resource represents a method
 * invocation. The plain method name is kept in sync: it is cleared when {@code mo} is
 * {@code null} and derived from the reflection object otherwise.
 *
 * @param mo
 *           the methodObject to set
 */
public void setMethodObject(Method mo) {
    this.methodObject = mo;
    this.method = (mo == null) ? null : mo.getName();
}
/**
 * Builds a stable unique identifier for this method-invocation resource by hashing the
 * target, the method name and the Base64-encoded value of every parameter with SHA-256.
 *
 * @return hex-encoded SHA-256 digest identifying this resource
 */
@Override
public String createUniqueId() {
    Base64 b64 = new Base64();
    // StringBuilder instead of StringBuffer: the builder never escapes this method,
    // so StringBuffer's synchronization would be pure overhead.
    StringBuilder b = new StringBuilder();
    b.append(getTarget());
    b.append(getMethod());
    for (ResourceParameter param : getParameters()) {
        b.append(b64.encodeToString(param.getEncodedValue()));
    }
    return DigestUtils.sha256Hex(b.toString());
}
/**
 * Returns a short textual representation of this resource in the form
 * {@code [SimpleClassName] <super info> ; method: <name> ; invoker: <class>}.
 *
 * @return string representation for logging and error messages
 */
@Override
public String toString() {
    // StringBuilder instead of StringBuffer: local-only builder needs no synchronization.
    StringBuilder b = new StringBuilder();
    b.append("[");
    b.append(this.getClass().getSimpleName());
    b.append("] ");
    b.append(super.toString());
    b.append(" ; method: ");
    b.append(getMethod());
    b.append(" ; invoker: ");
    b.append(invokerClass);
    return b.toString();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed.dht.atomic;
import java.io.Externalizable;
import java.nio.ByteBuffer;
import java.util.UUID;
import javax.cache.processor.EntryProcessor;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.internal.GridDirectTransient;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.CacheObject;
import org.apache.ignite.internal.processors.cache.GridCacheDeployable;
import org.apache.ignite.internal.processors.cache.GridCacheIdMessage;
import org.apache.ignite.internal.processors.cache.GridCacheOperation;
import org.apache.ignite.internal.processors.cache.GridCacheSharedContext;
import org.apache.ignite.internal.processors.cache.KeyCacheObject;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * Base class for DHT-side atomic cache update requests sent from a primary node to backup
 * (and near-cache) nodes. Holds the per-request metadata (future IDs, write version,
 * synchronization mode, topology version) and a packed {@link #flags} byte; the key/value
 * payload accessors are abstract and supplied by concrete subclasses.
 */
public abstract class GridDhtAtomicAbstractUpdateRequest extends GridCacheIdMessage implements GridCacheDeployable {
    /** Skip store flag bit mask. */
    protected static final int DHT_ATOMIC_SKIP_STORE_FLAG_MASK = 0x01;

    /** Keep binary flag. */
    protected static final int DHT_ATOMIC_KEEP_BINARY_FLAG_MASK = 0x02;

    /** Near cache key flag. */
    protected static final int DHT_ATOMIC_NEAR_FLAG_MASK = 0x04;

    /** Flag set when an update result is required (see {@link #hasResult(boolean)}). */
    static final int DHT_ATOMIC_HAS_RESULT_MASK = 0x08;

    /** Flag set when backups should respond immediately instead of delaying the reply. */
    private static final int DHT_ATOMIC_REPLY_WITHOUT_DELAY = 0x10;

    /** Obsolete near cache key flag. */
    protected static final int DHT_ATOMIC_OBSOLETE_NEAR_KEY_FLAG_MASK = 0x20;

    /** Flag indicating transformation operation was performed. */
    protected static final int DHT_ATOMIC_TRANSFORM_OP_FLAG_MASK = 0x40;

    /** Message index. */
    public static final int CACHE_MSG_IDX = nextIndexId();

    /** Future ID on primary. */
    protected long futId;

    /** Write version. */
    protected GridCacheVersion writeVer;

    /** Write synchronization mode. */
    protected CacheWriteSynchronizationMode syncMode;

    /** Topology version. */
    protected AffinityTopologyVersion topVer;

    /** Subject ID. */
    protected UUID subjId;

    /** Task name hash. */
    protected int taskNameHash;

    /** Node ID. Transient: not marshalled, set locally. */
    @GridDirectTransient
    protected UUID nodeId;

    /** On response flag. Access should be synced on future. */
    @GridDirectTransient
    private boolean onRes;

    /** ID of the near node that awaits the reply, if any. */
    private UUID nearNodeId;

    /** Future ID on the near node. */
    private long nearFutId;

    /** Additional flags. */
    protected byte flags;

    /**
     * Empty constructor required by {@link Externalizable}.
     */
    protected GridDhtAtomicAbstractUpdateRequest() {
        // No-op.
    }

    /**
     * Constructor.
     *
     * @param cacheId Cache ID.
     * @param nodeId Node ID.
     * @param futId Future ID on primary node.
     * @param writeVer Write version.
     * @param syncMode Cache write synchronization mode.
     * @param topVer Topology version (must have a positive topology version).
     * @param subjId Subject ID.
     * @param taskNameHash Task name hash.
     * @param addDepInfo Deployment info flag.
     * @param keepBinary Keep binary flag.
     * @param skipStore Skip write-through to store flag.
     */
    protected GridDhtAtomicAbstractUpdateRequest(int cacheId,
        UUID nodeId,
        long futId,
        GridCacheVersion writeVer,
        CacheWriteSynchronizationMode syncMode,
        @NotNull AffinityTopologyVersion topVer,
        UUID subjId,
        int taskNameHash,
        boolean addDepInfo,
        boolean keepBinary,
        boolean skipStore
    ) {
        assert topVer.topologyVersion() > 0 : topVer;

        this.cacheId = cacheId;
        this.nodeId = nodeId;
        this.futId = futId;
        this.writeVer = writeVer;
        this.syncMode = syncMode;
        this.topVer = topVer;
        this.subjId = subjId;
        this.taskNameHash = taskNameHash;
        this.addDepInfo = addDepInfo;

        if (skipStore)
            setFlag(true, DHT_ATOMIC_SKIP_STORE_FLAG_MASK);

        if (keepBinary)
            setFlag(true, DHT_ATOMIC_KEEP_BINARY_FLAG_MASK);
    }

    /** {@inheritDoc} */
    @Override public final AffinityTopologyVersion topologyVersion() {
        return topVer;
    }

    /**
     * Records where the reply for a near-cache update must be sent.
     *
     * @param nearNodeId Near node ID.
     * @param nearFutId Future ID on near node.
     */
    void nearReplyInfo(UUID nearNodeId, long nearFutId) {
        assert nearNodeId != null;

        this.nearNodeId = nearNodeId;
        this.nearFutId = nearFutId;
    }

    /**
     * @return {@code True} if backups should reply immediately.
     */
    boolean replyWithoutDelay() {
        return isFlag(DHT_ATOMIC_REPLY_WITHOUT_DELAY);
    }

    /**
     * @param replyWithoutDelay {@code True} if backups should reply immediately.
     */
    void replyWithoutDelay(boolean replyWithoutDelay) {
        setFlag(replyWithoutDelay, DHT_ATOMIC_REPLY_WITHOUT_DELAY);
    }

    /**
     * @param res Result flag.
     */
    void hasResult(boolean res) {
        setFlag(res, DHT_ATOMIC_HAS_RESULT_MASK);
    }

    /**
     * @return Result flag.
     */
    private boolean hasResult() {
        return isFlag(DHT_ATOMIC_HAS_RESULT_MASK);
    }

    /**
     * @return Near node ID.
     */
    public UUID nearNodeId() {
        return nearNodeId;
    }

    /** {@inheritDoc} */
    @Override public int lookupIndex() {
        return CACHE_MSG_IDX;
    }

    /**
     * @return Node ID.
     */
    public UUID nodeId() {
        return nodeId;
    }

    /**
     * @return Flags.
     */
    public final byte flags() {
        return flags;
    }

    /**
     * @return Keep binary flag.
     */
    public final boolean keepBinary() {
        return isFlag(DHT_ATOMIC_KEEP_BINARY_FLAG_MASK);
    }

    /**
     * @return Skip write-through to a persistent storage.
     */
    public final boolean skipStore() {
        return isFlag(DHT_ATOMIC_SKIP_STORE_FLAG_MASK);
    }

    /**
     * @return {@code True} if transformation operation was performed.
     */
    public final boolean transformOperation() {
        return isFlag(DHT_ATOMIC_TRANSFORM_OP_FLAG_MASK);
    }

    /**
     * @return {@code True} if on response flag changed.
     */
    public boolean onResponse() {
        // Flips onRes to true exactly once; every later call returns false.
        return !onRes && (onRes = true);
    }

    /**
     * @return {@code True} if response was received.
     */
    boolean hasResponse() {
        return onRes;
    }

    /** {@inheritDoc} */
    @Override public boolean addDeploymentInfo() {
        return addDepInfo;
    }

    /**
     * @return Force transform backups flag.
     */
    public abstract boolean forceTransformBackups();

    /** {@inheritDoc} */
    @Override public IgniteLogger messageLogger(GridCacheSharedContext ctx) {
        return ctx.atomicMessageLogger();
    }

    /** {@inheritDoc} */
    @Override public void onAckReceived() {
        // Payload is no longer needed once the message has been acknowledged.
        cleanup();
    }

    /**
     * @param key Key to add.
     * @param val Value, {@code null} if should be removed.
     * @param entryProcessor Entry processor.
     * @param ttl TTL (optional).
     * @param conflictExpireTime Conflict expire time (optional).
     * @param conflictVer Conflict version (optional).
     * @param addPrevVal If {@code true} adds previous value.
     * @param prevVal Previous value.
     * @param updateCntr Update counter.
     * @param cacheOp Corresponding cache operation.
     */
    public abstract void addWriteValue(KeyCacheObject key,
        @Nullable CacheObject val,
        EntryProcessor<Object, Object, Object> entryProcessor,
        long ttl,
        long conflictExpireTime,
        @Nullable GridCacheVersion conflictVer,
        boolean addPrevVal,
        @Nullable CacheObject prevVal,
        long updateCntr,
        GridCacheOperation cacheOp);

    /**
     * @param key Key to add.
     * @param val Value, {@code null} if should be removed.
     * @param entryProcessor Entry processor.
     * @param ttl TTL.
     * @param expireTime Expire time.
     */
    public abstract void addNearWriteValue(KeyCacheObject key,
        @Nullable CacheObject val,
        EntryProcessor<Object, Object, Object> entryProcessor,
        long ttl,
        long expireTime);

    /**
     * Cleanup values not needed after message was sent.
     */
    protected abstract void cleanup();

    /**
     * @return Subject ID.
     */
    public final UUID subjectId() {
        return subjId;
    }

    /**
     * @return Task name.
     */
    public final int taskNameHash() {
        return taskNameHash;
    }

    /**
     * @return Future ID on primary node.
     */
    public final long futureId() {
        return futId;
    }

    /**
     * @return Future ID on near node.
     */
    public final long nearFutureId() {
        return nearFutId;
    }

    /**
     * @return Write version.
     */
    public final GridCacheVersion writeVersion() {
        return writeVer;
    }

    /**
     * @return Cache write synchronization mode.
     */
    public final CacheWriteSynchronizationMode writeSynchronizationMode() {
        return syncMode;
    }

    /**
     * @return Keys size.
     */
    public abstract int size();

    /**
     * @return Near cache keys size.
     */
    public abstract int nearSize();

    /**
     * @param key Key to check.
     * @return {@code true} if request keys contain key.
     */
    public abstract boolean hasKey(KeyCacheObject key);

    /**
     * @param idx Key index.
     * @return Key.
     */
    public abstract KeyCacheObject key(int idx);

    /**
     * @return Obsolete near cache keys size.
     */
    public abstract int obsoleteNearKeysSize();

    /**
     * @param idx Obsolete near cache key index.
     * @return Obsolete near cache key.
     */
    public abstract KeyCacheObject obsoleteNearKey(int idx);

    /**
     * @param updCntr Update counter.
     * @return Update counter.
     */
    public abstract Long updateCounter(int updCntr);

    /**
     * @param idx Near key index.
     * @return Key.
     */
    public abstract KeyCacheObject nearKey(int idx);

    /**
     * @param idx Key index.
     * @return Value.
     */
    @Nullable public abstract CacheObject value(int idx);

    /**
     * @param idx Key index.
     * @return Previous value.
     */
    @Nullable public abstract CacheObject previousValue(int idx);

    /**
     * @param idx Key index.
     * @return Entry processor.
     */
    @Nullable public abstract EntryProcessor<Object, Object, Object> entryProcessor(int idx);

    /**
     * @param idx Near key index.
     * @return Value.
     */
    @Nullable public abstract CacheObject nearValue(int idx);

    /**
     * @param idx Key index.
     * @return Transform closure.
     */
    @Nullable public abstract EntryProcessor<Object, Object, Object> nearEntryProcessor(int idx);

    /**
     * @param idx Index.
     * @return Conflict version.
     */
    @Nullable public abstract GridCacheVersion conflictVersion(int idx);

    /**
     * @param idx Index.
     * @return TTL.
     */
    public abstract long ttl(int idx);

    /**
     * @param idx Index.
     * @return TTL for near cache update.
     */
    public abstract long nearTtl(int idx);

    /**
     * @param idx Index.
     * @return Conflict expire time.
     */
    public abstract long conflictExpireTime(int idx);

    /**
     * @param idx Index.
     * @return Expire time for near cache update.
     */
    public abstract long nearExpireTime(int idx);

    /**
     * @return Optional arguments for entry processor.
     */
    @Nullable public abstract Object[] invokeArguments();

    /**
     * Sets flag mask.
     *
     * @param flag Set or clear.
     * @param mask Mask.
     */
    protected final void setFlag(boolean flag, int mask) {
        flags = flag ? (byte)(flags | mask) : (byte)(flags & ~mask);
    }

    /**
     * Reads flag mask.
     *
     * @param mask Mask to read.
     * @return Flag value.
     */
    final boolean isFlag(int mask) {
        return (flags & mask) != 0;
    }

    /** {@inheritDoc} */
    @Override public byte fieldsCount() {
        // Superclass fields plus the nine fields marshalled below (states 4..12).
        return 13;
    }

    /** {@inheritDoc} */
    @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
        writer.setBuffer(buf);

        if (!super.writeTo(buf, writer))
            return false;

        if (!writer.isHeaderWritten()) {
            if (!writer.writeHeader(directType(), fieldsCount()))
                return false;

            writer.onHeaderWritten();
        }

        // Intentional fall-through between cases: writing resumes from the saved state
        // and continues until the buffer is full (a write returns false) or all fields
        // have been written.
        switch (writer.state()) {
            case 4:
                if (!writer.writeByte("flags", flags))
                    return false;

                writer.incrementState();

            case 5:
                if (!writer.writeLong("futId", futId))
                    return false;

                writer.incrementState();

            case 6:
                if (!writer.writeLong("nearFutId", nearFutId))
                    return false;

                writer.incrementState();

            case 7:
                if (!writer.writeUuid("nearNodeId", nearNodeId))
                    return false;

                writer.incrementState();

            case 8:
                if (!writer.writeUuid("subjId", subjId))
                    return false;

                writer.incrementState();

            case 9:
                if (!writer.writeByte("syncMode", syncMode != null ? (byte)syncMode.ordinal() : -1))
                    return false;

                writer.incrementState();

            case 10:
                if (!writer.writeInt("taskNameHash", taskNameHash))
                    return false;

                writer.incrementState();

            case 11:
                if (!writer.writeAffinityTopologyVersion("topVer", topVer))
                    return false;

                writer.incrementState();

            case 12:
                if (!writer.writeMessage("writeVer", writeVer))
                    return false;

                writer.incrementState();

        }

        return true;
    }

    /** {@inheritDoc} */
    @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
        reader.setBuffer(buf);

        if (!reader.beforeMessageRead())
            return false;

        if (!super.readFrom(buf, reader))
            return false;

        // Intentional fall-through between cases: mirrors writeTo() so a partially read
        // message resumes from the saved state on the next buffer.
        switch (reader.state()) {
            case 4:
                flags = reader.readByte("flags");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 5:
                futId = reader.readLong("futId");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 6:
                nearFutId = reader.readLong("nearFutId");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 7:
                nearNodeId = reader.readUuid("nearNodeId");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 8:
                subjId = reader.readUuid("subjId");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 9:
                byte syncModeOrd;

                syncModeOrd = reader.readByte("syncMode");

                if (!reader.isLastRead())
                    return false;

                syncMode = CacheWriteSynchronizationMode.fromOrdinal(syncModeOrd);

                reader.incrementState();

            case 10:
                taskNameHash = reader.readInt("taskNameHash");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 11:
                topVer = reader.readAffinityTopologyVersion("topVer");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 12:
                writeVer = reader.readMessage("writeVer");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

        }

        return reader.afterMessageRead(GridDhtAtomicAbstractUpdateRequest.class);
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        // NOTE: this local builder shadows the byte field 'flags'; it renders the set
        // flag bits as human-readable names.
        StringBuilder flags = new StringBuilder();

        if (skipStore())
            appendFlag(flags, "skipStore");
        if (keepBinary())
            appendFlag(flags, "keepBinary");
        if (isFlag(DHT_ATOMIC_NEAR_FLAG_MASK))
            appendFlag(flags, "near");
        if (hasResult())
            appendFlag(flags, "hasRes");
        if (replyWithoutDelay())
            appendFlag(flags, "resNoDelay");

        return S.toString(GridDhtAtomicAbstractUpdateRequest.class, this,
            "flags", flags.toString());
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.editor.dmn.converter;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.fail;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.io.InputStream;
import java.util.Date;
import java.util.List;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.io.IOUtils;
import org.flowable.dmn.model.Decision;
import org.flowable.dmn.model.DecisionRule;
import org.flowable.dmn.model.DecisionTable;
import org.flowable.dmn.model.DecisionTableOrientation;
import org.flowable.dmn.model.DmnDefinition;
import org.flowable.dmn.model.HitPolicy;
import org.flowable.dmn.model.InputClause;
import org.flowable.dmn.model.LiteralExpression;
import org.flowable.dmn.model.OutputClause;
import org.flowable.dmn.model.RuleInputClauseContainer;
import org.flowable.dmn.model.RuleOutputClauseContainer;
import org.flowable.dmn.model.UnaryTests;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Yvo Swillens
*/
public class DmnJsonConverterTest {
    // Class logger (kept for use by test methods further down in this class).
    private static final Logger LOGGER = LoggerFactory.getLogger(DmnJsonConverterTest.class);

    // Classpath locations of the editor-json fixtures exercised by the tests below.
    private static final String JSON_RESOURCE_1 = "org/flowable/editor/dmn/converter/decisiontable_1.json";
    private static final String JSON_RESOURCE_2 = "org/flowable/editor/dmn/converter/decisiontable_no_rules.json";
    private static final String JSON_RESOURCE_3 = "org/flowable/editor/dmn/converter/decisiontable_2.json";
    private static final String JSON_RESOURCE_4 = "org/flowable/editor/dmn/converter/decisiontable_empty_expressions.json";
    private static final String JSON_RESOURCE_5 = "org/flowable/editor/dmn/converter/decisiontable_order.json";
    private static final String JSON_RESOURCE_6 = "org/flowable/editor/dmn/converter/decisiontable_entries.json";
    private static final String JSON_RESOURCE_7 = "org/flowable/editor/dmn/converter/decisiontable_dates.json";
    private static final String JSON_RESOURCE_8 = "org/flowable/editor/dmn/converter/decisiontable_empty_operator.json";
    private static final String JSON_RESOURCE_9 = "org/flowable/editor/dmn/converter/decisiontable_complex_output_expression_regression.json";
    private static final String JSON_RESOURCE_10 = "org/flowable/editor/dmn/converter/decisiontable_regression_model_v1.json";
    private static final String JSON_RESOURCE_11 = "org/flowable/editor/dmn/converter/decisiontable_regression_model_v1_no_type.json";
    private static final String JSON_RESOURCE_12 = "org/flowable/editor/dmn/converter/decisiontable_regression_model_v1_no_type2.json";
    private static final String JSON_RESOURCE_13 = "org/flowable/editor/dmn/converter/decisiontable_regression_model_v1_no_type3.json";
    private static final String JSON_RESOURCE_14 = "org/flowable/editor/dmn/converter/decisiontable_regression_model_v1_no_type4.json";
    private static final String JSON_RESOURCE_15 = "org/flowable/editor/dmn/converter/decisiontable_aggregation.json";
    private static final String JSON_RESOURCE_16 = "org/flowable/editor/dmn/converter/decisiontable_special_characters.json";
    private static final String JSON_RESOURCE_17 = "org/flowable/editor/dmn/converter/decisiontable_custom_input_expression.json";
    private static final String JSON_RESOURCE_18 = "org/flowable/editor/dmn/converter/decisiontable_collections_collection_input.json";
    private static final String JSON_RESOURCE_19 = "org/flowable/editor/dmn/converter/decisiontable_collections_collection_compare.json";
    private static final String JSON_RESOURCE_20 = "org/flowable/editor/dmn/converter/decisiontable_complex_output_expression.json";

    // Shared mapper for reading the json fixtures.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
@Test
public void testConvertJsonToDmnOK() {
    // Full round-trip check for a two-input / one-output decision table: verifies the
    // definition metadata, both input clauses, the output clause and both rules
    // field by field, then converts back to modeler json.
    JsonNode testJsonResource = parseJson(JSON_RESOURCE_1);
    DmnDefinition dmnDefinition = new DmnJsonConverter().convertToDmn(testJsonResource, "abc", 1, new Date());

    // Definition metadata.
    assertNotNull(dmnDefinition);
    assertEquals(DmnJsonConverter.MODEL_NAMESPACE, dmnDefinition.getNamespace());
    assertEquals(DmnJsonConverter.URI_JSON, dmnDefinition.getTypeLanguage());
    assertEquals("definition_abc", dmnDefinition.getId());
    assertEquals("decisionTableRule1", dmnDefinition.getName());
    assertNotNull(dmnDefinition.getDecisions());
    assertEquals(1, dmnDefinition.getDecisions().size());

    Decision decision = dmnDefinition.getDecisions().get(0);
    assertNotNull(decision);
    assertEquals("decTable1", decision.getId());

    DecisionTable decisionTable = (DecisionTable) decision.getExpression();
    assertNotNull(decisionTable);
    assertEquals("decisionTable_11", decisionTable.getId());
    assertEquals(HitPolicy.ANY, decisionTable.getHitPolicy());
    assertEquals(DecisionTableOrientation.RULE_AS_ROW, decisionTable.getPreferredOrientation());

    List<InputClause> inputClauses = decisionTable.getInputs();
    assertNotNull(inputClauses);
    assertEquals(2, inputClauses.size());

    List<OutputClause> outputClauses = decisionTable.getOutputs();
    assertNotNull(outputClauses);
    assertEquals(1, outputClauses.size());

    // Condition 1
    InputClause condition1 = inputClauses.get(0);
    assertNotNull(condition1.getInputExpression());

    LiteralExpression inputExpression11 = condition1.getInputExpression();
    assertNotNull(inputExpression11);
    assertEquals("Order Size", inputExpression11.getLabel());
    assertEquals("inputExpression_input1", inputExpression11.getId());
    assertEquals("number", inputExpression11.getTypeRef());

    // Condition 2
    InputClause condition2 = inputClauses.get(1);
    assertNotNull(condition2.getInputExpression());

    LiteralExpression inputExpression21 = condition2.getInputExpression();
    assertNotNull(inputExpression21);
    assertEquals("Registered On", inputExpression21.getLabel());
    assertEquals("inputExpression_input2", inputExpression21.getId());
    assertEquals("date", inputExpression21.getTypeRef());

    // Conclusion 1
    OutputClause conclusion1 = outputClauses.get(0);
    assertNotNull(conclusion1);
    assertEquals("Has discount", conclusion1.getLabel());
    assertEquals("outputExpression_output1", conclusion1.getId());
    assertEquals("boolean", conclusion1.getTypeRef());
    assertEquals("newVariable1", conclusion1.getName());

    // Rule 1
    assertNotNull(decisionTable.getRules());
    assertEquals(2, decisionTable.getRules().size());

    List<DecisionRule> rules = decisionTable.getRules();
    assertEquals(2, rules.get(0).getInputEntries().size());

    // input expression 1 (rule 1); entries must reference the matching clause objects.
    RuleInputClauseContainer ruleClauseContainer11 = rules.get(0).getInputEntries().get(0);
    UnaryTests inputEntry11 = ruleClauseContainer11.getInputEntry();
    assertNotNull(inputEntry11);
    assertEquals("< 10", inputEntry11.getText());
    assertSame(condition1, ruleClauseContainer11.getInputClause());

    // input expression 2 (rule 1)
    RuleInputClauseContainer ruleClauseContainer12 = rules.get(0).getInputEntries().get(1);
    UnaryTests inputEntry12 = ruleClauseContainer12.getInputEntry();
    assertNotNull(inputEntry12);
    assertEquals("<= date:toDate('1977-09-18')", inputEntry12.getText());
    assertSame(condition2, ruleClauseContainer12.getInputClause());

    // output expression 1 (rule 1)
    assertEquals(1, rules.get(0).getOutputEntries().size());

    RuleOutputClauseContainer ruleClauseContainer13 = rules.get(0).getOutputEntries().get(0);
    LiteralExpression outputEntry13 = ruleClauseContainer13.getOutputEntry();
    assertNotNull(outputEntry13);
    assertEquals("false", outputEntry13.getText());
    assertSame(conclusion1, ruleClauseContainer13.getOutputClause());

    // input expression 1 (rule 2)
    RuleInputClauseContainer ruleClauseContainer21 = rules.get(1).getInputEntries().get(0);
    UnaryTests inputEntry21 = ruleClauseContainer21.getInputEntry();
    assertNotNull(inputEntry21);
    assertEquals("> 10", inputEntry21.getText());
    assertSame(condition1, ruleClauseContainer21.getInputClause());

    // input expression 2 (rule 2)
    RuleInputClauseContainer ruleClauseContainer22 = rules.get(1).getInputEntries().get(1);
    UnaryTests inputEntry22 = ruleClauseContainer22.getInputEntry();
    assertNotNull(inputEntry22);
    assertEquals("> date:toDate('1977-09-18')", inputEntry22.getText());
    assertSame(condition2, ruleClauseContainer22.getInputClause());

    // output expression 1 (rule 2)
    assertEquals(1, rules.get(1).getOutputEntries().size());

    RuleOutputClauseContainer ruleClauseContainer23 = rules.get(1).getOutputEntries().get(0);
    LiteralExpression outputEntry23 = ruleClauseContainer23.getOutputEntry();
    assertNotNull(outputEntry23);
    assertEquals("true", outputEntry23.getText());
    assertSame(conclusion1, ruleClauseContainer23.getOutputClause());

    // And back: dmn model -> modeler json.
    ObjectNode modelerJson = new DmnJsonConverter().convertToJson(dmnDefinition);
    assertNotNull(modelerJson);
}
@Test
public void testConvertJsonToDmnNoRules() {
    // A decision table without any rules must still convert its metadata and
    // input/output clauses correctly, and convert back to modeler json.
    JsonNode testJsonResource = parseJson(JSON_RESOURCE_2);
    DmnDefinition dmnDefinition = new DmnJsonConverter().convertToDmn(testJsonResource, "abc", 1, new Date());

    // Definition metadata.
    assertNotNull(dmnDefinition);
    assertEquals(DmnJsonConverter.MODEL_NAMESPACE, dmnDefinition.getNamespace());
    assertEquals("definition_abc", dmnDefinition.getId());
    assertEquals("decisionTableRule1", dmnDefinition.getName());
    assertEquals(DmnJsonConverter.URI_JSON, dmnDefinition.getTypeLanguage());
    assertNotNull(dmnDefinition.getDecisions());
    assertEquals(1, dmnDefinition.getDecisions().size());

    Decision decision = dmnDefinition.getDecisions().get(0);
    assertNotNull(decision);
    assertEquals("decTable1", decision.getId());

    DecisionTable decisionTable = (DecisionTable) decision.getExpression();
    assertNotNull(decisionTable);
    assertEquals("decisionTable_11", decisionTable.getId());
    assertEquals(HitPolicy.ANY, decisionTable.getHitPolicy());
    assertEquals(DecisionTableOrientation.RULE_AS_ROW, decisionTable.getPreferredOrientation());

    List<InputClause> inputClauses = decisionTable.getInputs();
    assertNotNull(inputClauses);
    assertEquals(2, inputClauses.size());

    // Input clause 1: order size (number).
    LiteralExpression inputExpression11 = inputClauses.get(0).getInputExpression();
    assertNotNull(inputExpression11);
    assertEquals("Order Size", inputExpression11.getLabel());
    assertEquals("inputExpression_1", inputExpression11.getId());
    assertEquals("number", inputExpression11.getTypeRef());
    assertEquals("ordersize", inputExpression11.getText());

    // Input clause 2: registration date.
    LiteralExpression inputExpression12 = inputClauses.get(1).getInputExpression();
    assertNotNull(inputExpression12);
    assertEquals("Registered On", inputExpression12.getLabel());
    assertEquals("inputExpression_2", inputExpression12.getId());
    assertEquals("date", inputExpression12.getTypeRef());
    assertEquals("registered", inputExpression12.getText());

    List<OutputClause> outputClauses = decisionTable.getOutputs();
    assertNotNull(outputClauses);
    assertEquals(1, outputClauses.size());

    // Condition 1
    OutputClause outputClause1 = outputClauses.get(0);
    assertNotNull(outputClause1);
    assertEquals("Has discount", outputClause1.getLabel());
    assertEquals("outputExpression_3", outputClause1.getId());
    assertEquals("newVariable1", outputClause1.getName());
    assertEquals("boolean", outputClause1.getTypeRef());

    // And back: dmn model -> modeler json.
    ObjectNode modelerJson = new DmnJsonConverter().convertToJson(dmnDefinition);
    assertNotNull(modelerJson);
}
@Test
public void testConvertJsonToDmn2OK() {
    // Smoke test: the second fixture converts to a dmn model and back without errors.
    JsonNode testJsonResource = parseJson(JSON_RESOURCE_3);
    DmnDefinition definition = new DmnJsonConverter().convertToDmn(testJsonResource, "abc", 1, new Date());
    assertNotNull(definition);
    assertNotNull(new DmnJsonConverter().convertToJson(definition));
}
@Test
public void testConvertJsonToDmnEmptyExpressions() {
    // Empty input expressions must be converted to the "-" (don't care) marker.
    DmnDefinition definition = new DmnJsonConverter().convertToDmn(parseJson(JSON_RESOURCE_4), "abc", 1, new Date());
    assertNotNull(definition);

    DecisionTable table = (DecisionTable) definition.getDecisions().get(0).getExpression();
    for (int ruleIdx = 0; ruleIdx < 3; ruleIdx++) {
        assertEquals("-", table.getRules().get(ruleIdx).getInputEntries().get(0).getInputEntry().getText());
    }

    assertNotNull(new DmnJsonConverter().convertToJson(definition));
}
@Test
public void testConvertJsonToDmnConditionOrder() {
    // Editor json lists the entries of a rule in the wrong order inside the rule
    // object; the converted dmn model must order the rule columns exactly like the
    // declared input/output clauses.
    DmnDefinition definition = new DmnJsonConverter().convertToDmn(parseJson(JSON_RESOURCE_5), "abc", 1, new Date());
    assertNotNull(definition);

    DecisionTable table = (DecisionTable) definition.getDecisions().get(0).getExpression();
    List<DecisionRule> rules = table.getRules();
    assertNotNull(rules);
    assertEquals(1, rules.size());
    assertNotNull(rules.get(0).getOutputEntries());
    assertEquals(3, rules.get(0).getOutputEntries().size());

    String[] expectedClauseOrder = { "outputExpression_14", "outputExpression_13", "outputExpression_15" };
    for (int i = 0; i < expectedClauseOrder.length; i++) {
        assertEquals(expectedClauseOrder[i], rules.get(0).getOutputEntries().get(i).getOutputClause().getId());
    }

    assertNotNull(new DmnJsonConverter().convertToJson(definition));
}
@Test
public void testConvertJsonToDmnEntries() {
    // Input/output "allowed values" entries must survive the json -> dmn conversion,
    // both as raw text and as parsed text values.
    DmnDefinition definition = new DmnJsonConverter().convertToDmn(parseJson(JSON_RESOURCE_6), "abc", 1, new Date());
    DecisionTable table = (DecisionTable) definition.getDecisions().get(0).getExpression();

    assertEquals("OUTPUT ORDER", table.getHitPolicy().getValue());

    assertEquals("\"AAA\",\"BBB\"", table.getInputs().get(0).getInputValues().getText());
    assertEquals("AAA", table.getInputs().get(0).getInputValues().getTextValues().get(0));
    assertEquals("BBB", table.getInputs().get(0).getInputValues().getTextValues().get(1));

    assertEquals("\"THIRD\",\"FIRST\",\"SECOND\"", table.getOutputs().get(0).getOutputValues().getText());
    assertEquals("THIRD", table.getOutputs().get(0).getOutputValues().getTextValues().get(0));
    assertEquals("FIRST", table.getOutputs().get(0).getOutputValues().getTextValues().get(1));
    assertEquals("SECOND", table.getOutputs().get(0).getOutputValues().getTextValues().get(2));

    assertNotNull(new DmnJsonConverter().convertToJson(definition));
}
@Test
public void testConvertJsonToDmnDates() {
    // Date input entries keep their comparison operator in the converted expression text.
    DmnDefinition definition = new DmnJsonConverter().convertToDmn(parseJson(JSON_RESOURCE_7), "abc", 1, new Date());
    DecisionTable table = (DecisionTable) definition.getDecisions().get(0).getExpression();

    assertEquals("== date:toDate('14-06-2017')", table.getRules().get(0).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("!= date:toDate('16-06-2017')", table.getRules().get(1).getInputEntries().get(0).getInputEntry().getText());

    assertNotNull(new DmnJsonConverter().convertToJson(definition));
}
@Test
public void testConvertJsonToDmnEmptyOperator() {
    DmnDefinition definition = new DmnJsonConverter().convertToDmn(parseJson(JSON_RESOURCE_8), "abc", 1, new Date());
    DecisionTable table = (DecisionTable) definition.getDecisions().get(0).getExpression();

    // Both rules: an entry with no operator keeps its expression text as-is, and an
    // empty entry becomes the "-" (don't care) marker; both stay linked to a clause.
    String[] firstInputTexts = { "date:toDate('2017-06-01')", "date:toDate('2017-06-02')" };
    for (int ruleIdx = 0; ruleIdx < firstInputTexts.length; ruleIdx++) {
        DecisionRule rule = table.getRules().get(ruleIdx);
        assertEquals(firstInputTexts[ruleIdx], rule.getInputEntries().get(0).getInputEntry().getText());
        assertEquals("-", rule.getInputEntries().get(1).getInputEntry().getText());
        assertNotNull(rule.getInputEntries().get(0).getInputClause());
        assertNotNull(rule.getInputEntries().get(1).getInputClause());
    }

    // Output entries: populated in rule 1, empty string in rule 2; clause links intact.
    assertEquals("date:toDate('2017-06-03')", table.getRules().get(0).getOutputEntries().get(0).getOutputEntry().getText());
    assertEquals("", table.getRules().get(1).getOutputEntries().get(0).getOutputEntry().getText());
    assertNotNull(table.getRules().get(0).getOutputEntries().get(0).getOutputClause());
    assertNotNull(table.getRules().get(1).getOutputEntries().get(0).getOutputClause());

    assertNotNull(new DmnJsonConverter().convertToJson(definition));
}
@Test
public void testConvertJsonToDmnComplexOutputExpressionRegression() {
    // A complex output expression without delimiters must be preserved verbatim.
    JsonNode model = parseJson(JSON_RESOURCE_9);
    DmnDefinition definition = new DmnJsonConverter().convertToDmn(model, "abc", 1, new Date());
    DecisionTable table = (DecisionTable) definition.getDecisions().get(0).getExpression();

    assertEquals("refVar1 * refVar2", table.getRules().get(0).getOutputEntries().get(0).getOutputEntry().getText());

    // The definition must convert back to modeler JSON without failing.
    ObjectNode exported = new DmnJsonConverter().convertToJson(definition);
    assertNotNull(exported);
}
@Test
public void testConvertJsonToDmnComplexOutputExpression() {
    // A ${...}-delimited complex output expression must be preserved verbatim.
    JsonNode model = parseJson(JSON_RESOURCE_20);
    DmnDefinition definition = new DmnJsonConverter().convertToDmn(model, "abc", 1, new Date());
    DecisionTable table = (DecisionTable) definition.getDecisions().get(0).getExpression();

    assertEquals("${refVar1 * refVar2}", table.getRules().get(0).getOutputEntries().get(0).getOutputEntry().getText());

    // The definition must convert back to modeler JSON without failing.
    ObjectNode exported = new DmnJsonConverter().convertToJson(definition);
    assertNotNull(exported);
}
@Test
public void testConvertJsonToDmnRegressionModelv1() {
    // Regression test for the v1 model format: four columns in and out, covering
    // operator handling for string, number, boolean and date cells.
    JsonNode testJsonResource = parseJson(JSON_RESOURCE_10);
    DmnDefinition dmnDefinition = new DmnJsonConverter().convertToDmn(testJsonResource, "abc", 1, new Date());
    DecisionTable decisionTable = (DecisionTable) dmnDefinition.getDecisions().get(0).getExpression();

    // structure: 4 inputs, 4 outputs, mirrored in each rule's entries
    assertEquals(4, decisionTable.getInputs().size());
    assertEquals(4, decisionTable.getOutputs().size());
    assertEquals(4, decisionTable.getRules().get(0).getInputEntries().size());
    assertEquals(4, decisionTable.getRules().get(0).getOutputEntries().size());

    DecisionRule rule1 = decisionTable.getRules().get(0);
    DecisionRule rule2 = decisionTable.getRules().get(1);

    // rule 1: equality operators across all four input types, literal outputs
    assertEquals("== \"TEST\"", rule1.getInputEntries().get(0).getInputEntry().getText());
    assertEquals("== 100", rule1.getInputEntries().get(1).getInputEntry().getText());
    assertEquals("== true", rule1.getInputEntries().get(2).getInputEntry().getText());
    assertEquals("== date:toDate('2017-06-01')", rule1.getInputEntries().get(3).getInputEntry().getText());
    assertEquals("\"WAS TEST\"", rule1.getOutputEntries().get(0).getOutputEntry().getText());
    assertEquals("100", rule1.getOutputEntries().get(1).getOutputEntry().getText());
    assertEquals("true", rule1.getOutputEntries().get(2).getOutputEntry().getText());
    assertEquals("date:toDate('2017-06-01')", rule1.getOutputEntries().get(3).getOutputEntry().getText());

    // rule 2: negated operators and the boolean false branch
    assertEquals("!= \"TEST\"", rule2.getInputEntries().get(0).getInputEntry().getText());
    assertEquals("!= 100", rule2.getInputEntries().get(1).getInputEntry().getText());
    assertEquals("== false", rule2.getInputEntries().get(2).getInputEntry().getText());
    assertEquals("!= date:toDate('2017-06-01')", rule2.getInputEntries().get(3).getInputEntry().getText());
    assertEquals("\"WASN'T TEST\"", rule2.getOutputEntries().get(0).getOutputEntry().getText());
    assertEquals("1", rule2.getOutputEntries().get(1).getOutputEntry().getText());
    assertEquals("false", rule2.getOutputEntries().get(2).getOutputEntry().getText());
    assertEquals("date:toDate('2016-06-01')", rule2.getOutputEntries().get(3).getOutputEntry().getText());

    // round-trip back to modeler JSON must not fail
    ObjectNode modelerJson = new DmnJsonConverter().convertToJson(dmnDefinition);
    assertNotNull(modelerJson);
}
@Test
public void testConvertJsonToDmnRegressionModelv1NoType() {
    // v1 model without explicit column types: resolved type refs must still be set.
    JsonNode model = parseJson(JSON_RESOURCE_11);
    DmnDefinition definition = new DmnJsonConverter().convertToDmn(model, "abc", 1, new Date());
    DecisionTable table = (DecisionTable) definition.getDecisions().get(0).getExpression();

    assertEquals("string", table.getInputs().get(0).getInputExpression().getTypeRef());
    assertEquals("number", table.getInputs().get(1).getInputExpression().getTypeRef());
    assertEquals("boolean", table.getInputs().get(2).getInputExpression().getTypeRef());
    assertEquals("date", table.getInputs().get(3).getInputExpression().getTypeRef());
    assertEquals("string", table.getOutputs().get(0).getTypeRef());

    // The definition must convert back to modeler JSON without failing.
    ObjectNode exported = new DmnJsonConverter().convertToJson(definition);
    assertNotNull(exported);
}
@Test
public void testConvertJsonToDmnRegressionModelv1NoType2() {
    // Second variant of the type-less v1 model: same resolved type refs expected.
    JsonNode model = parseJson(JSON_RESOURCE_12);
    DmnDefinition definition = new DmnJsonConverter().convertToDmn(model, "abc", 1, new Date());
    DecisionTable table = (DecisionTable) definition.getDecisions().get(0).getExpression();

    assertEquals("string", table.getInputs().get(0).getInputExpression().getTypeRef());
    assertEquals("number", table.getInputs().get(1).getInputExpression().getTypeRef());
    assertEquals("boolean", table.getInputs().get(2).getInputExpression().getTypeRef());
    assertEquals("date", table.getInputs().get(3).getInputExpression().getTypeRef());
    assertEquals("string", table.getOutputs().get(0).getTypeRef());

    // The definition must convert back to modeler JSON without failing.
    ObjectNode exported = new DmnJsonConverter().convertToJson(definition);
    assertNotNull(exported);
}
@Test
public void testConvertJsonToDmnRegressionModelv1NoType3() {
    // Single-column type-less v1 model: both sides resolve to "string".
    JsonNode model = parseJson(JSON_RESOURCE_13);
    DmnDefinition definition = new DmnJsonConverter().convertToDmn(model, "abc", 1, new Date());
    DecisionTable table = (DecisionTable) definition.getDecisions().get(0).getExpression();

    assertEquals("string", table.getInputs().get(0).getInputExpression().getTypeRef());
    assertEquals("string", table.getOutputs().get(0).getTypeRef());

    // The definition must convert back to modeler JSON without failing.
    ObjectNode exported = new DmnJsonConverter().convertToJson(definition);
    assertNotNull(exported);
}
@Test
public void testConvertJsonToDmnRegressionModelv1NoType4() {
    // Type-less v1 model with a numeric input and a boolean output column.
    JsonNode model = parseJson(JSON_RESOURCE_14);
    DmnDefinition definition = new DmnJsonConverter().convertToDmn(model, "abc", 1, new Date());
    DecisionTable table = (DecisionTable) definition.getDecisions().get(0).getExpression();

    assertEquals("number", table.getInputs().get(0).getInputExpression().getTypeRef());
    assertEquals("boolean", table.getOutputs().get(0).getTypeRef());

    // The definition must convert back to modeler JSON without failing.
    ObjectNode exported = new DmnJsonConverter().convertToJson(definition);
    assertNotNull(exported);
}
@Test
public void testConvertJsonToDmnCollectOperator() {
    // The COLLECT hit policy's aggregation operator must be carried over.
    JsonNode model = parseJson(JSON_RESOURCE_15);
    DmnDefinition definition = new DmnJsonConverter().convertToDmn(model, "abc", 1, new Date());
    DecisionTable table = (DecisionTable) definition.getDecisions().get(0).getExpression();

    assertEquals("SUM", table.getAggregation().getValue());

    // The definition must convert back to modeler JSON without failing.
    ObjectNode exported = new DmnJsonConverter().convertToJson(definition);
    assertNotNull(exported);
}
@Test
public void testConvertJsonToDmnStringSpecialCharacters() {
    // String cells containing special characters must still produce quoted equality tests.
    JsonNode model = parseJson(JSON_RESOURCE_16);
    DmnDefinition definition = new DmnJsonConverter().convertToDmn(model, "abc", 1, new Date());
    DecisionTable table = (DecisionTable) definition.getDecisions().get(0).getExpression();

    assertEquals("== \"TEST\"", table.getRules().get(0).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("== \"TEST\"", table.getRules().get(1).getInputEntries().get(0).getInputEntry().getText());

    // The definition must convert back to modeler JSON without failing.
    ObjectNode exported = new DmnJsonConverter().convertToJson(definition);
    assertNotNull(exported);
}
@Test
public void testConvertJsonToDmnCustomExpressions() {
    // Hand-written ${...} and #{...} expressions must be preserved verbatim.
    JsonNode model = parseJson(JSON_RESOURCE_17);
    DmnDefinition definition = new DmnJsonConverter().convertToDmn(model, "abc", 1, new Date());
    DecisionTable table = (DecisionTable) definition.getDecisions().get(0).getExpression();

    assertEquals("${inputVar4 != null}", table.getRules().get(0).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("#{inputVar4 > date:now()}", table.getRules().get(1).getInputEntries().get(0).getInputEntry().getText());

    // The definition must convert back to modeler JSON without failing.
    ObjectNode exported = new DmnJsonConverter().convertToJson(definition);
    assertNotNull(exported);
}
@Test
public void testConvertJsonToDmnCollectionsCollectionInput() {
    // Collection operators applied to a collection *input*: each modeler operator maps
    // to a collection:noneOf/allOf/anyOf(...) expression, empty cells become "-", and
    // plain comparisons stay as ==/!= tests.
    JsonNode testJsonResource = parseJson(JSON_RESOURCE_18);
    DmnDefinition dmnDefinition = new DmnJsonConverter().convertToDmn(testJsonResource, "abc", 1, new Date());
    DecisionTable decisionTable = (DecisionTable) dmnDefinition.getDecisions().get(0).getExpression();

    assertEquals("${collection:noneOf(collection1, \"testValue\")}", decisionTable.getRules().get(0).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("${collection:allOf(collection1, \"testValue\")}", decisionTable.getRules().get(1).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("${collection:allOf(collection1, 'testVar1,testVar2')}", decisionTable.getRules().get(2).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("${collection:allOf(collection1, '\"testValue1\",\"testValue2\"')}", decisionTable.getRules().get(3).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("${collection:allOf(collection1, '10,20')}", decisionTable.getRules().get(4).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("${collection:allOf(collection1, 10)}", decisionTable.getRules().get(5).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("-", decisionTable.getRules().get(6).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("${collection:noneOf(collection1, \"testValue\")}", decisionTable.getRules().get(7).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("-", decisionTable.getRules().get(8).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("${collection:anyOf(collection1, \"testValue\")}", decisionTable.getRules().get(9).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("-", decisionTable.getRules().get(10).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("== \"testValue\"", decisionTable.getRules().get(11).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("== testCollection", decisionTable.getRules().get(12).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("!= \"testValue\"", decisionTable.getRules().get(13).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("${collection:allOf(collection1, '\"test,Value1\",\"test,Value2\"')}", decisionTable.getRules().get(14).getInputEntries().get(0).getInputEntry().getText());
    // extension elements: the original operator and expression are kept so the modeler
    // can reconstruct the cell exactly
    assertEquals("NONE OF", decisionTable.getRules().get(0).getInputEntries().get(0).getInputEntry().getExtensionElements().get("operator").get(0).getElementText());
    assertEquals("\"testValue\"", decisionTable.getRules().get(0).getInputEntries().get(0).getInputEntry().getExtensionElements().get("expression").get(0).getElementText());

    // round-trip: the exported modeler JSON must surface operator/expression per rule
    ObjectNode modelerJson = new DmnJsonConverter().convertToJson(dmnDefinition);
    assertNotNull(modelerJson);
    assertEquals("NONE OF", modelerJson.get("rules").get(0).get("inputExpression_1_operator").asText());
    assertEquals("\"testValue\"", modelerJson.get("rules").get(0).get("inputExpression_1_expression").asText());
    assertEquals("ALL OF", modelerJson.get("rules").get(1).get("inputExpression_1_operator").asText());
    assertEquals("\"testValue\"", modelerJson.get("rules").get(1).get("inputExpression_1_expression").asText());
    assertEquals("ALL OF", modelerJson.get("rules").get(2).get("inputExpression_1_operator").asText());
    assertEquals("testVar1, testVar2", modelerJson.get("rules").get(2).get("inputExpression_1_expression").asText());
}
@Test
public void testConvertJsonToDmnCollectionsCollectionCompare() {
    // Collection operators where the *compared value* is the collection (IS IN / IS NOT IN):
    // the cell value becomes the first argument and the input variable the second.
    JsonNode testJsonResource = parseJson(JSON_RESOURCE_19);
    DmnDefinition dmnDefinition = new DmnJsonConverter().convertToDmn(testJsonResource, "abc", 1, new Date());
    DecisionTable decisionTable = (DecisionTable) dmnDefinition.getDecisions().get(0).getExpression();

    assertEquals("${collection:noneOf(\"testValue\", input1)}", decisionTable.getRules().get(0).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("${collection:allOf(\"testValue\", input1)}", decisionTable.getRules().get(1).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("${collection:allOf('testVar1,testVar2', input1)}", decisionTable.getRules().get(2).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("${collection:allOf('\"testValue1\",\"testValue2\"', input1)}", decisionTable.getRules().get(3).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("${collection:allOf('10,20', input1)}", decisionTable.getRules().get(4).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("${collection:allOf(10, input1)}", decisionTable.getRules().get(5).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("-", decisionTable.getRules().get(6).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("${collection:noneOf(\"testValue\", input1)}", decisionTable.getRules().get(7).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("-", decisionTable.getRules().get(8).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("${collection:allOf(\"testValue\", input1)}", decisionTable.getRules().get(9).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("-", decisionTable.getRules().get(10).getInputEntries().get(0).getInputEntry().getText());
    assertEquals("${collection:allOf('\"test,Value1\",\"test,Value2\"', input1)}", decisionTable.getRules().get(11).getInputEntries().get(0).getInputEntry().getText());
    // extension elements keep the modeler-facing operator and expression text
    assertEquals("IS NOT IN", decisionTable.getRules().get(0).getInputEntries().get(0).getInputEntry().getExtensionElements().get("operator").get(0).getElementText());
    assertEquals("\"testValue\"", decisionTable.getRules().get(0).getInputEntries().get(0).getInputEntry().getExtensionElements().get("expression").get(0).getElementText());

    // round-trip: operator/expression must reappear per rule in the modeler JSON
    ObjectNode modelerJson = new DmnJsonConverter().convertToJson(dmnDefinition);
    assertNotNull(modelerJson);
    assertEquals("IS NOT IN", modelerJson.get("rules").get(0).get("inputExpression_1_operator").asText());
    assertEquals("\"testValue\"", modelerJson.get("rules").get(0).get("inputExpression_1_expression").asText());
    assertEquals("IS IN", modelerJson.get("rules").get(1).get("inputExpression_1_operator").asText());
    assertEquals("\"testValue\"", modelerJson.get("rules").get(1).get("inputExpression_1_expression").asText());
    assertEquals("IS IN", modelerJson.get("rules").get(2).get("inputExpression_1_operator").asText());
    assertEquals("testVar1, testVar2", modelerJson.get("rules").get(2).get("inputExpression_1_expression").asText());
}
/* Helper methods */
/**
 * Reads the given classpath resource fully into a String, failing the test if it
 * cannot be read.
 *
 * @param resource classpath-relative resource name
 * @return the resource content decoded as UTF-8 (the trailing {@code return null}
 *         is unreachable in practice because {@code fail} throws)
 */
protected String readJsonToString(String resource) {
    try (InputStream is = this.getClass().getClassLoader().getResourceAsStream(resource)) {
        // Decode explicitly as UTF-8: the charset-less IOUtils.toString overload is
        // deprecated and uses the platform default encoding, which makes these tests
        // locale-dependent on non-UTF-8 systems.
        return IOUtils.toString(is, "UTF-8");
    } catch (IOException e) {
        fail("Could not read " + resource + " : " + e.getMessage());
        return null;
    }
}
/**
 * Parses the given classpath JSON resource into a tree, failing the test on
 * unreadable or malformed content.
 */
protected JsonNode parseJson(String resource) {
    // readJsonToString already fails the test if the resource cannot be read.
    final String raw = readJsonToString(resource);
    try {
        return OBJECT_MAPPER.readTree(raw);
    } catch (IOException e) {
        fail("Could not parse " + resource + " : " + e.getMessage());
        return null;
    }
}
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.vcs.log.data;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.BackgroundTaskQueue;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.Consumer;
import com.intellij.util.ThrowableConsumer;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.vcs.log.*;
import com.intellij.vcs.log.util.StopWatch;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class VcsLogData implements Disposable, VcsLogDataProvider {
  private static final Logger LOG = Logger.getInstance(VcsLogData.class);
  // Size of the first, quickly-loaded block of recent commits; configurable via the registry.
  private static final int RECENT_COMMITS_COUNT = Registry.intValue("vcs.log.recent.commits.count");

  @NotNull private final Project myProject;
  // One log provider per VCS root; the key set defines which roots this log covers.
  @NotNull private final Map<VirtualFile, VcsLogProvider> myLogProviders;
  // Queue serializing background loading tasks; cleared on (re)initialize and on dispose.
  @NotNull private final BackgroundTaskQueue myDataLoaderQueue;
  @NotNull private final MiniDetailsGetter myMiniDetailsGetter;
  @NotNull private final CommitDetailsGetter myDetailsGetter;

  /**
   * Current user name, as specified in the VCS settings.
   * It can be configured differently for different roots => store in a map.
   */
  private final Map<VirtualFile, VcsUser> myCurrentUser = ContainerUtil.newHashMap();

  /**
   * Cached details of the latest commits.
   * We store them separately from the cache of {@link DataGetter}, to make sure that they are always available,
   * which is important because these details will be constantly visible to the user,
   * thus it would be annoying to re-load them from VCS if the cache overflows.
   */
  @NotNull private final Map<Integer, VcsCommitMetadata> myTopCommitsDetailsCache = ContainerUtil.newConcurrentMap();

  @NotNull private final VcsUserRegistryImpl myUserRegistry;
  // Maps commit hashes to compact integer indices and back; see createLogHashMap() for the fallback.
  @NotNull private final VcsLogHashMap myHashMap;
  @NotNull private final ContainingBranchesGetter myContainingBranchesGetter;
  @NotNull private final VcsLogRefresherImpl myRefresher;
  // Listeners notified (via invokeLater) every time a new DataPack is produced.
  @NotNull private final List<DataPackChangeListener> myDataPackChangeListeners = ContainerUtil.createLockFreeCopyOnWriteList();
  @NotNull private final Consumer<Exception> myFatalErrorsConsumer;

  public VcsLogData(@NotNull Project project,
                    @NotNull Map<VirtualFile, VcsLogProvider> logProviders,
                    @NotNull Consumer<Exception> fatalErrorsConsumer) {
    myProject = project;
    myLogProviders = logProviders;
    myDataLoaderQueue = new BackgroundTaskQueue(project, "Loading history...");
    myUserRegistry = (VcsUserRegistryImpl)ServiceManager.getService(project, VcsUserRegistry.class);
    myFatalErrorsConsumer = fatalErrorsConsumer;
    // myHashMap must exist before the getters and the refresher, which all receive it.
    myHashMap = createLogHashMap();
    myMiniDetailsGetter = new MiniDetailsGetter(myHashMap, logProviders, myTopCommitsDetailsCache, this);
    myDetailsGetter = new CommitDetailsGetter(myHashMap, logProviders, this);
    myRefresher =
      new VcsLogRefresherImpl(myProject, myHashMap, myLogProviders, myUserRegistry, myTopCommitsDetailsCache, new Consumer<DataPack>() {
        @Override
        public void consume(DataPack dataPack) {
          fireDataPackChangeEvent(dataPack);
        }
      }, new Consumer<Exception>() {
        @Override
        public void consume(Exception e) {
          // Cancellation is a normal part of refreshing; only report real failures.
          if (!(e instanceof ProcessCanceledException)) {
            LOG.error(e);
          }
        }
      }, RECENT_COMMITS_COUNT);
    myContainingBranchesGetter = new ContainingBranchesGetter(this, this);
  }

  /**
   * Creates the hash-to-index map, falling back to a non-persistent in-memory
   * implementation if the persistent one fails to initialize.
   */
  @NotNull
  private VcsLogHashMap createLogHashMap() {
    VcsLogHashMap hashMap;
    try {
      hashMap = new VcsLogHashMapImpl(myProject, myLogProviders, myFatalErrorsConsumer, this);
    }
    catch (IOException e) {
      hashMap = new InMemoryHashMap();
      LOG.error("Falling back to in-memory hashes", e);
    }
    return hashMap;
  }

  // Delivers the new data pack to all registered listeners via Application.invokeLater.
  private void fireDataPackChangeEvent(@NotNull final DataPack dataPack) {
    ApplicationManager.getApplication().invokeLater(new Runnable() {
      @Override
      public void run() {
        for (DataPackChangeListener listener : myDataPackChangeListeners) {
          listener.onDataPackChange(dataPack);
        }
      }
    });
  }

  public void addDataPackChangeListener(@NotNull final DataPackChangeListener listener) {
    myDataPackChangeListeners.add(listener);
  }

  public void removeDataPackChangeListener(@NotNull DataPackChangeListener listener) {
    myDataPackChangeListeners.remove(listener);
  }

  @NotNull
  public DataPack getDataPack() {
    return myRefresher.getCurrentDataPack();
  }

  @NotNull
  public VisiblePackBuilder createVisiblePackBuilder() {
    return new VisiblePackBuilder(myLogProviders, myHashMap, myTopCommitsDetailsCache, myDetailsGetter);
  }

  @Override
  @Nullable
  public CommitId getCommitId(int commitIndex) {
    return myHashMap.getCommitId(commitIndex);
  }

  @Override
  public int getCommitIndex(@NotNull Hash hash, @NotNull VirtualFile root) {
    return myHashMap.getCommitIndex(hash, root);
  }

  @NotNull
  public VcsLogHashMap getHashMap() {
    return myHashMap;
  }

  /**
   * Starts loading the log from scratch: drops queued tasks and cached state,
   * reads the current user for each root and loads the first block of commits,
   * then notifies data pack listeners.
   */
  public void initialize() {
    final StopWatch initSw = StopWatch.start("initialize");
    myDataLoaderQueue.clear();
    runInBackground(new ThrowableConsumer<ProgressIndicator, VcsException>() {
      @Override
      public void consume(ProgressIndicator indicator) throws VcsException {
        resetState();
        readCurrentUser();
        DataPack dataPack = myRefresher.readFirstBlock();
        fireDataPackChangeEvent(dataPack);
        initSw.report();
      }
    }, "Loading History...");
  }

  // Queries each provider for the user configured for its root. A missing or
  // unreadable username is logged and skipped instead of aborting initialization.
  private void readCurrentUser() {
    StopWatch sw = StopWatch.start("readCurrentUser");
    for (Map.Entry<VirtualFile, VcsLogProvider> entry : myLogProviders.entrySet()) {
      VirtualFile root = entry.getKey();
      try {
        VcsUser me = entry.getValue().getCurrentUser(root);
        if (me != null) {
          myCurrentUser.put(root, me);
        }
        else {
          LOG.info("Username not configured for root " + root);
        }
      }
      catch (VcsException e) {
        LOG.warn("Couldn't read the username from root " + root, e);
      }
    }
    sw.report();
  }

  // Drops cached per-session state; called before a full reload and on dispose.
  private void resetState() {
    myTopCommitsDetailsCache.clear();
  }

  @NotNull
  public Set<VcsUser> getAllUsers() {
    return myUserRegistry.getUsers();
  }

  @NotNull
  public Map<VirtualFile, VcsUser> getCurrentUser() {
    return myCurrentUser;
  }

  public boolean isMultiRoot() {
    return myLogProviders.size() > 1;
  }

  @NotNull
  public Project getProject() {
    return myProject;
  }

  @NotNull
  public Collection<VirtualFile> getRoots() {
    return myLogProviders.keySet();
  }

  @NotNull
  public Collection<VcsLogProvider> getLogProviders() {
    return myLogProviders.values();
  }

  @NotNull
  public ContainingBranchesGetter getContainingBranchesGetter() {
    return myContainingBranchesGetter;
  }

  // Runs the task on the shared loader queue with an indeterminate progress
  // indicator created by the refresher's progress object.
  private void runInBackground(final ThrowableConsumer<ProgressIndicator, VcsException> task, final String title) {
    Task.Backgroundable backgroundable = new Task.Backgroundable(myProject, title, false) {
      @Override
      public void run(@NotNull ProgressIndicator indicator) {
        indicator.setIndeterminate(true);
        try {
          task.consume(indicator);
        }
        catch (VcsException e) {
          throw new RuntimeException(e); // TODO
        }
      }
    };
    myDataLoaderQueue.run(backgroundable, null, () -> myRefresher.getProgress().createProgressIndicator(backgroundable));
  }

  /**
   * Makes the log perform complete refresh for all roots.
   * It fairly retrieves the data from the VCS and rebuilds the whole log.
   */
  public void refreshCompletely() {
    initialize();
  }

  /**
   * Makes the log perform refresh for the given root.
   * This refresh can be optimized, i. e. it can query VCS just for the part of the log.
   */
  public void refresh(@NotNull Collection<VirtualFile> roots) {
    myRefresher.refresh(roots);
  }

  public CommitDetailsGetter getCommitDetailsGetter() {
    return myDetailsGetter;
  }

  @NotNull
  public MiniDetailsGetter getMiniDetailsGetter() {
    return myMiniDetailsGetter;
  }

  @Override
  public void dispose() {
    myDataLoaderQueue.clear();
    resetState();
  }

  @NotNull
  public VcsLogProvider getLogProvider(@NotNull VirtualFile root) {
    return myLogProviders.get(root);
  }

  @NotNull
  public VcsUserRegistryImpl getUserRegistry() {
    return myUserRegistry;
  }

  @NotNull
  public VcsLogProgress getProgress() {
    return myRefresher.getProgress();
  }
}
| |
/*
* Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.db.object;
import java.util.ArrayList;
import java.util.List;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabase;
import com.orientechnologies.orient.core.db.ODatabasePojoAbstract;
import com.orientechnologies.orient.core.db.OUserObject2RecordHandler;
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.db.record.ODatabaseRecordAbstract;
import com.orientechnologies.orient.core.dictionary.ODictionary;
import com.orientechnologies.orient.core.dictionary.ODictionaryWrapper;
import com.orientechnologies.orient.core.entity.OEntityManager;
import com.orientechnologies.orient.core.exception.ODatabaseException;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.iterator.OObjectIteratorCluster;
import com.orientechnologies.orient.core.iterator.OObjectIteratorMultiCluster;
import com.orientechnologies.orient.core.metadata.security.ODatabaseSecurityResources;
import com.orientechnologies.orient.core.metadata.security.ORole;
import com.orientechnologies.orient.core.record.ORecordSchemaAware;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.serialization.serializer.object.OObjectSerializerHelper;
import com.orientechnologies.orient.core.serialization.serializer.record.OSerializationThreadLocal;
import com.orientechnologies.orient.core.tx.OTransactionEntry;
import com.orientechnologies.orient.core.tx.OTransactionNoTx;
/**
 * Object Database instance. It's a wrapper around the class ODatabaseDocumentTx that handles the conversion between ODocument
 * instances and POJOs.
*
* @see ODatabaseDocumentTx
* @author Luca Garulli
*/
@SuppressWarnings("unchecked")
public class ODatabaseObjectTx extends ODatabasePojoAbstract<ODocument, Object> implements ODatabaseObject,
OUserObject2RecordHandler {
private ODictionary<Object> dictionary;
private OEntityManager entityManager;
private boolean saveOnlyDirty;
/**
 * Opens an object database wrapper on top of a document database for the given URL.
 */
public ODatabaseObjectTx(final String iURL) {
    super(new ODatabaseDocumentTx(iURL));
    // Route user-object/record callbacks from the underlying database back to this wrapper.
    underlying.setDatabaseOwner(this);
    // The entity manager is looked up per database URL and creates registered POJO classes.
    entityManager = OEntityManager.getEntityManagerByDatabaseURL(iURL);
    saveOnlyDirty = OGlobalConfiguration.OBJECT_SAVE_ONLY_DIRTY.getValueAsBoolean();
}
/**
 * Creates a new POJO of the given class, delegating to {@link #newInstance(String)}.
 */
public <T> T newInstance(final Class<T> iType) {
    final Object pojo = newInstance(iType.getName());
    return (T) pojo;
}
/**
 * Create a new POJO by its class name. Make sure registerEntityClasses() has been called, declaring the packages that contain
 * the entity classes.
 *
 * @see #registerEntityClasses(String)
 */
public Object newInstance(final String iClassName) {
    checkOpeness();
    checkSecurity(ODatabaseSecurityResources.CLASS, ORole.PERMISSION_CREATE, iClassName);
    try {
        return entityManager.createPojo(iClassName);
    } catch (Exception e) {
        // NOTE(review): OLogManager.error with an exception-class argument presumably rethrows
        // as ODatabaseException — confirm; otherwise the method falls through and returns null.
        OLogManager.instance().error(this, "Error on creating object of class " + iClassName, e, ODatabaseException.class);
    }
    return null;
}
/** Browses all records of the given class; polymorphic (subclasses included) by default. */
public <RET> OObjectIteratorMultiCluster<RET> browseClass(final Class<RET> iClusterClass) {
    return browseClass(iClusterClass, true);
}
/**
 * Browses all records of the given class, optionally including subclasses.
 * A {@code null} class yields {@code null} instead of throwing.
 */
public <RET> OObjectIteratorMultiCluster<RET> browseClass(final Class<RET> iClusterClass, final boolean iPolymorphic) {
    return iClusterClass == null ? null : browseClass(iClusterClass.getSimpleName(), iPolymorphic);
}
/** Browses all records of the named class; polymorphic (subclasses included) by default. */
public <RET> OObjectIteratorMultiCluster<RET> browseClass(final String iClassName) {
    return browseClass(iClassName, true);
}
/**
 * Browses all records of the named class, optionally including subclasses.
 * Requires the database to be open and read permission on the class.
 */
public <RET> OObjectIteratorMultiCluster<RET> browseClass(final String iClassName, final boolean iPolymorphic) {
    checkOpeness();
    checkSecurity(ODatabaseSecurityResources.CLASS, ORole.PERMISSION_READ, iClassName);
    return new OObjectIteratorMultiCluster<RET>(this, (ODatabaseRecordAbstract<ODocument>) getUnderlying().getUnderlying(),
        iClassName, iPolymorphic);
}
/**
 * Browses all records stored in the named cluster.
 * Requires the database to be open and read permission on the cluster.
 */
public <RET> OObjectIteratorCluster<RET> browseCluster(final String iClusterName) {
    checkOpeness();
    checkSecurity(ODatabaseSecurityResources.CLUSTER, ORole.PERMISSION_READ, iClusterName);
    return (OObjectIteratorCluster<RET>) new OObjectIteratorCluster<Object>(this,
        (ODatabaseRecordAbstract<ODocument>) getUnderlying().getUnderlying(), getClusterIdByName(iClusterName));
}
/** Reloads the given POJO from the database using the default fetch plan. */
public ODatabaseObjectTx load(final Object iPojo) {
    return load(iPojo, null);
}
/**
 * Reloads the document backing the given POJO and refreshes the POJO's fields from it,
 * honoring the given fetch plan. A {@code null} POJO is a no-op.
 */
public ODatabaseObjectTx load(final Object iPojo, final String iFetchPlan) {
    checkOpeness();
    if (iPojo == null)
        return this;
    // GET THE ASSOCIATED DOCUMENT
    final ODocument record = getRecordByUserObject(iPojo, true);
    underlying.load(record);
    // Copy the freshly loaded document state back into the POJO.
    stream2pojo(record, iPojo, iFetchPlan);
    return this;
}
/** Loads the POJO identified by the given record id using the default fetch plan. */
public Object load(final ORID iRecordId) {
    return load(iRecordId, null);
}
/**
 * Loads the POJO identified by the given record id, honoring the fetch plan.
 * Returns the cached POJO if the record was already materialized in this session,
 * otherwise creates, fills and registers a new instance. Returns {@code null} for
 * a {@code null} id or a record that cannot be found.
 */
public Object load(final ORID iRecordId, final String iFetchPlan) {
    checkOpeness();
    if (iRecordId == null)
        return null;

    // Resolve the document: prefer the session cache, otherwise hit the underlying database.
    ODocument document = rid2Records.get(iRecordId);
    if (document == null)
        document = underlying.load(iRecordId);
    if (document == null)
        return null;

    // Reuse an already materialized POJO for this document, if any.
    final Object cached = records2Objects.get(document);
    if (cached != null)
        return cached;

    // Otherwise build a fresh POJO from the document and track the association.
    final Object pojo = stream2pojo(document, newInstance(document.getClassName()), iFetchPlan);
    registerPojo(pojo, document);
    return pojo;
}
/**
 * Saves the POJO to the default cluster for its class.
 * If the record is new and a class was specified, the configured cluster id will be used to store the class.
 */
public ODatabaseObject save(final Object iContent) {
    return save(iContent, null);
}
/**
 * Stores the POJO on the specified cluster, provided the cluster is allowed for the class and the record satisfies
 * the constraints declared in the schema. A {@code null} POJO is a no-op.
 *
 * @see ORecordSchemaAware#validate()
 */
public ODatabaseObject save(final Object iPojo, final String iClusterName) {
    checkOpeness();
    if (iPojo == null)
        return this;
    // Reset the per-thread serialization state before walking the object graph.
    OSerializationThreadLocal.INSTANCE.get().clear();
    // GET THE ASSOCIATED DOCUMENT
    ODocument record = objects2Records.get(System.identityHashCode(iPojo));
    if (record == null)
        // No document yet: this POJO is new, create a document typed after its class name.
        record = underlying.newInstance(iPojo.getClass().getSimpleName());
    // REGISTER BEFORE TO SERIALIZE TO AVOID PROBLEMS WITH CIRCULAR DEPENDENCY
    registerPojo(iPojo, record);
    pojo2Stream(iPojo, record);
    underlying.save(record, iClusterName);
    // RE-REGISTER FOR NEW RECORDS SINCE THE ID HAS CHANGED
    registerPojo(iPojo, record);
    return this;
}
public ODatabaseObject delete(final Object iContent) {
  checkOpeness();
  if (iContent == null) {
    return this;
  }

  // Resolve the backing document; if the pojo was never registered locally, load it by its id.
  ODocument record = getRecordByUserObject(iContent, false);
  if (record == null) {
    record = (ODocument) underlying.load(OObjectSerializerHelper.getObjectID(this, iContent));
  }
  underlying.delete(record);

  // Outside of a transaction the removal is immediate, so drop the local registration as well.
  if (getTransaction() instanceof OTransactionNoTx) {
    unregisterPojo(iContent, record);
  }
  return this;
}
public long countClass(final String iClassName) {
  checkOpeness();
  // Counting is delegated entirely to the underlying document database.
  return underlying.countClass(iClassName);
}
public long countClass(final Class<?> iClass) {
  checkOpeness();
  // The Java class' simple name is used as the database class name.
  return underlying.countClass(iClass.getSimpleName());
}
public ODictionary<Object> getDictionary() {
  checkOpeness();
  // Lazily create the dictionary wrapper on first access.
  if (dictionary == null) {
    dictionary = new ODictionaryWrapper(this, underlying);
  }
  return dictionary;
}
/**
 * Commits the pending transaction on the underlying database, then synchronizes the local
 * identity maps: created records get their newly assigned RID and version pushed back onto
 * the pojo, updated records get the new version, and deleted records are unregistered.
 */
@Override
public ODatabasePojoAbstract<ODocument, Object> commit() {
// COPY ALL TX ENTRIES (snapshot: the underlying commit clears the transaction's entry list)
final List<OTransactionEntry<?>> entries;
if (getTransaction().getEntries() != null) {
entries = new ArrayList<OTransactionEntry<?>>();
for (OTransactionEntry<?> entry : getTransaction().getEntries())
entries.add(entry);
} else
entries = null;
underlying.commit();
if (entries != null) {
// UPDATE ID & VERSION FOR ALL THE RECORDS
Object pojo = null;
for (OTransactionEntry<?> entry : entries) {
pojo = records2Objects.get(entry.getRecord());
switch (entry.status) {
case OTransactionEntry.CREATED:
rid2Records.put(entry.getRecord().getIdentity(), (ODocument) entry.getRecord());
OObjectSerializerHelper.setObjectID(entry.getRecord().getIdentity(), pojo);
// NOTE: no break — a created record falls through so its version is also updated
case OTransactionEntry.UPDATED:
OObjectSerializerHelper.setObjectVersion(entry.getRecord().getVersion(), pojo);
break;
case OTransactionEntry.DELETED:
OObjectSerializerHelper.setObjectID(null, pojo);
OObjectSerializerHelper.setObjectVersion(null, pojo);
unregisterPojo(pojo, (ODocument) entry.getRecord());
break;
}
}
}
return this;
}
/**
 * Rolls back the pending transaction on the underlying database and clears every local
 * identity map. Pojos created inside the transaction get their id and version reset since
 * their records were never persisted.
 */
@Override
public ODatabasePojoAbstract<ODocument, Object> rollback() {
// COPY ALL TX ENTRIES (only CREATED entries matter here: their pojo ids must be reset)
final List<OTransactionEntry<?>> newEntries;
if (getTransaction().getEntries() != null) {
newEntries = new ArrayList<OTransactionEntry<?>>();
for (OTransactionEntry<?> entry : getTransaction().getEntries())
if (entry.status == OTransactionEntry.CREATED)
newEntries.add(entry);
} else
newEntries = null;
underlying.rollback();
if (newEntries != null) {
// CLEAR ID & VERSION ON POJOS THAT WERE CREATED INSIDE THE ROLLED-BACK TX
Object pojo = null;
for (OTransactionEntry<?> entry : newEntries) {
pojo = records2Objects.get(entry.getRecord());
OObjectSerializerHelper.setObjectID(null, pojo);
OObjectSerializerHelper.setObjectVersion(null, pojo);
}
}
// The rollback invalidates every cached record, so drop all local registrations.
objects2Records.clear();
records2Objects.clear();
rid2Records.clear();
return this;
}
public OEntityManager getEntityManager() {
  // Entity manager associated with this database instance.
  return entityManager;
}
@Override
public ODatabaseDocument getUnderlying() {
  // Low-level document database wrapped by this object database.
  return underlying;
}
/**
 * Returns the version number of the object. Versions start at 0, assigned on creation.
 *
 * @param iPojo user object
 */
@Override
public int getVersion(final Object iPojo) {
  checkOpeness();
  // Prefer the version tracked on the registered document; otherwise fall back to the
  // version value carried by the pojo itself.
  final ODocument record = getRecordByUserObject(iPojo, false);
  return record != null ? record.getVersion() : OObjectSerializerHelper.getObjectVersion(iPojo);
}
/**
 * Returns the object's unique identity (record id).
 *
 * @param iPojo user object
 */
public ORID getIdentity(final Object iPojo) {
  checkOpeness();
  // Prefer the identity of the registered document; otherwise read the id carried by the pojo.
  final ODocument record = getRecordByUserObject(iPojo, false);
  return record != null ? record.getIdentity() : OObjectSerializerHelper.getObjectID(this, iPojo);
}
public Object newInstance() {
  checkOpeness();
  // A fresh document bound to the underlying database.
  return new ODocument(underlying);
}
public <DBTYPE extends ODatabase> DBTYPE checkSecurity(final String iResource, final byte iOperation) {
  // Security checks are enforced by the underlying database.
  return (DBTYPE) underlying.checkSecurity(iResource, iOperation);
}
public <DBTYPE extends ODatabase> DBTYPE checkSecurity(final String iResource, final int iOperation, Object... iResourcesSpecific) {
  // Variant that also targets specific resources (e.g. a class or cluster name).
  return (DBTYPE) underlying.checkSecurity(iResource, iOperation, iResourcesSpecific);
}
/**
 * Serializes the pojo's fields into the given document, using the registered entity manager
 * and the schema class whose name matches the pojo's simple class name.
 */
protected ODocument pojo2Stream(final Object iPojo, final ODocument iRecord) {
return OObjectSerializerHelper.toStream(iPojo, iRecord, getEntityManager(),
getMetadata().getSchema().getClass(iPojo.getClass().getSimpleName()), this, this, saveOnlyDirty);
}
/**
 * Deserializes the document's fields into the given pojo, honoring the optional fetch plan.
 */
protected Object stream2pojo(final ODocument record, final Object iPojo, final String iFetchPlan) {
return OObjectSerializerHelper.fromStream(record, iPojo, getEntityManager(), this, iFetchPlan);
}
public boolean isSaveOnlyDirty() {
  return saveOnlyDirty;
}
public void setSaveOnlyDirty(boolean saveOnlyDirty) {
  // Flag is forwarded to the serializer (see pojo2Stream).
  this.saveOnlyDirty = saveOnlyDirty;
}
}
| |
package io.advantageous.qbit.service.discovery.impl;
import io.advantageous.boon.core.Sys;
import io.advantageous.qbit.GlobalConstants;
import io.advantageous.qbit.QBit;
import io.advantageous.qbit.concurrent.PeriodicScheduler;
import io.advantageous.qbit.service.discovery.*;
import io.advantageous.qbit.service.discovery.spi.ServiceDiscoveryProvider;
import io.advantageous.qbit.service.health.HealthStatus;
import io.advantageous.qbit.util.ConcurrentHashSet;
import io.advantageous.qbit.util.Timer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Service Discovery. This is a generic service discovery class.
 * It has two providers: if the primary provider fails, the secondary (backup) provider is used.
 *
 * <p>Thread-safety: {@code register*}, {@code watch} and {@code checkIn*} may be called from any
 * thread; requests are handed to the monitor loop through concurrent queues and collections.</p>
 *
 * created by rhightower on 3/23/15.
 */
public class ServiceDiscoveryImpl implements ServiceDiscovery {
    private final PeriodicScheduler periodicScheduler;
    // Service names whose pools should be (re)loaded by the monitor loop.
    private final BlockingQueue<String> doneQueue = new LinkedTransferQueue<>();
    private final BlockingQueue<ServiceHealthCheckIn> checkInsQueue = new LinkedTransferQueue<>();
    private final BlockingQueue<EndpointDefinition> registerQueue = new LinkedTransferQueue<>();
    private final ServiceChangedEventChannel serviceChangedEventChannel;
    private final ServicePoolListener servicePoolListener;
    private final ExecutorService executorService;
    private final ConcurrentHashMap<String, ServicePool> servicePoolMap = new ConcurrentHashMap<>();
    private final ConcurrentHashSet<EndpointDefinition> endpointDefinitions = new ConcurrentHashSet<>();
    private final ServiceDiscoveryProvider provider;
    private final Logger logger = LoggerFactory.getLogger(ServiceDiscoveryImpl.class);
    private final boolean debug = GlobalConstants.DEBUG || logger.isDebugEnabled();
    private final boolean trace = logger.isTraceEnabled();
    private final int pollForServicesIntervalMS;
    private final int checkInIntervalInMS;
    private final ServiceDiscoveryProvider backupProvider;
    // Guards against the same service pool being loaded concurrently by several executor threads.
    private final ConcurrentHashSet<String> serviceNamesBeingLoaded = new ConcurrentHashSet<>();
    private final AtomicBoolean stop = new AtomicBoolean();
    // Concurrent set: watch() may be called from arbitrary threads while monitor() copies it.
    // (Was a plain TreeSet, which is not safe for that access pattern.)
    private final Set<String> serviceNames = new ConcurrentHashSet<>();
    // Only touched from the monitor loop's thread.
    private long lastCheckIn;

    /**
     * @param periodicScheduler              scheduler driving the monitor loop; null = QBit default
     * @param serviceChangedEventChannel     notified when a service pool changes; null = no-op
     * @param provider                       primary service discovery provider (required)
     * @param backupProvider                 used when the primary fails; may be null
     * @param servicePoolListener            listener for pool membership changes; null = no-op
     * @param executorService                executor for long-poll work; null = cached thread pool
     * @param pollForServicesIntervalSeconds how often the monitor loop wakes up
     * @param checkInIntervalInSeconds       how often every watched service is re-enqueued
     */
    public ServiceDiscoveryImpl(
            final PeriodicScheduler periodicScheduler,
            final ServiceChangedEventChannel serviceChangedEventChannel,
            final ServiceDiscoveryProvider provider,
            final ServiceDiscoveryProvider backupProvider,
            final ServicePoolListener servicePoolListener,
            final ExecutorService executorService,
            final int pollForServicesIntervalSeconds,
            final int checkInIntervalInSeconds) {
        this.backupProvider = backupProvider;
        this.checkInIntervalInMS = checkInIntervalInSeconds * 1000;
        this.provider = provider;
        this.pollForServicesIntervalMS = pollForServicesIntervalSeconds * 1000;
        this.periodicScheduler =
                periodicScheduler == null ? QBit.factory().periodicScheduler() : periodicScheduler;
        // Null collaborators become no-ops so call sites need no null checks.
        this.serviceChangedEventChannel = serviceChangedEventChannel == null ?
                serviceName -> {
                } : serviceChangedEventChannel;
        this.servicePoolListener = servicePoolListener == null ? serviceName -> {
        } : servicePoolListener;
        this.executorService = executorService == null ?
                Executors.newCachedThreadPool(runnable -> new Thread(runnable, "ServiceDiscovery")) :
                executorService;//Mostly sleeping threads doing long polls
        if (trace) {
            logger.trace("ServiceDiscoveryImpl created {}", provider);
        }
        // Best-effort unregistration of local endpoints on JVM shutdown.
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            if (!stop.get()) stop();
        }));
    }

    /** Registers an endpoint whose registration expires unless checked in within the TTL. */
    public EndpointDefinition registerWithTTL(
            final String serviceName,
            final int port,
            final int timeToLiveSeconds) {
        if (trace) {
            logger.trace("ServiceDiscoveryImpl::registerWithTTL() {} {}", serviceName, port);
        }
        watch(serviceName);
        EndpointDefinition endpointDefinition = new EndpointDefinition(HealthStatus.PASS,
                serviceName + "-" + ServiceDiscovery.uniqueString(port),
                serviceName, null, port, timeToLiveSeconds);
        return doRegister(endpointDefinition);
    }

    /** Registers an endpoint with an explicit id and a TTL. */
    public EndpointDefinition registerWithIdAndTimeToLive(
            final String serviceName, final String serviceId, final int port, final int timeToLiveSeconds) {
        if (trace) {
            logger.trace("ServiceDiscoveryImpl::registerWithIdAndTimeToLive() {} {}", serviceName, port);
        }
        watch(serviceName);
        EndpointDefinition endpointDefinition = new EndpointDefinition(HealthStatus.PASS,
                serviceId,
                serviceName, null, port, timeToLiveSeconds);
        return doRegister(endpointDefinition);
    }

    /** Records the endpoint locally and queues it for registration by the monitor loop. */
    private EndpointDefinition doRegister(EndpointDefinition endpointDefinition) {
        endpointDefinitions.add(endpointDefinition);
        registerQueue.offer(endpointDefinition);
        return endpointDefinition;
    }

    @Override
    public EndpointDefinition register(final String serviceName, final int port) {
        if (trace) {
            logger.trace("ServiceDiscoveryImpl::register() {} {}", serviceName, port);
        }
        watch(serviceName);
        EndpointDefinition endpointDefinition = new EndpointDefinition(HealthStatus.PASS,
                serviceName + "-" + ServiceDiscovery.uniqueString(port),
                serviceName, null, port);
        return doRegister(endpointDefinition);
    }

    @Override
    public EndpointDefinition registerWithId(final String serviceName, final String serviceId, final int port) {
        if (trace) {
            logger.trace("ServiceDiscoveryImpl::registerWithId() {} {}", serviceName, port);
        }
        watch(serviceName);
        EndpointDefinition endpointDefinition = new EndpointDefinition(HealthStatus.PASS,
                serviceId,
                serviceName, null, port);
        return doRegister(endpointDefinition);
    }

    /** Starts watching a service; the first call for a name schedules an initial pool load. */
    @Override
    public void watch(String serviceName) {
        if (trace) {
            logger.trace("ServiceDiscoveryImpl::watch() {}", serviceName);
        }
        // add() is the atomic test-and-set; the old contains()/add() pair could enqueue a
        // service twice when two threads raced on the same name.
        if (serviceNames.add(serviceName)) {
            doneQueue.offer(serviceName);
        }
    }

    @Override
    public void checkIn(final String serviceId, final HealthStatus healthStatus) {
        if (trace) {
            // Was "..." + serviceId with healthStatus as a placeholder-less arg, which SLF4J drops.
            logger.trace("ServiceDiscoveryImpl::checkIn() {} {}", serviceId, healthStatus);
        }
        checkInsQueue.offer(new ServiceHealthCheckIn(serviceId, healthStatus));
    }

    @Override
    public void checkInOk(final String serviceId) {
        if (trace) {
            logger.trace("ServiceDiscoveryImpl::checkInOk() {}", serviceId);
        }
        checkInsQueue.offer(new ServiceHealthCheckIn(serviceId, HealthStatus.PASS));
    }

    /** Returns the (lazily created) pool for the given service. */
    public ServicePool servicePool(final String serviceName) {
        // computeIfAbsent avoids the get()/put() check-then-act race on the concurrent map.
        return servicePoolMap.computeIfAbsent(serviceName,
                name -> new ServicePool(name, this.servicePoolListener));
    }

    @Override
    public List<EndpointDefinition> loadServices(final String serviceName) {
        if (trace) {
            logger.trace("ServiceDiscoveryImpl::loadServices() {}", serviceName);
        }
        final ServicePool servicePool = servicePoolMap.get(serviceName);
        if (servicePool == null) {
            // First request for this service: create an empty pool, start watching it, and
            // return nothing until the monitor loop performs the initial load.
            // putIfAbsent keeps a concurrently created pool instead of clobbering it.
            servicePoolMap.putIfAbsent(serviceName, new ServicePool(serviceName, this.servicePoolListener));
            watch(serviceName);
            return Collections.emptyList();
        }
        return servicePool.services();
    }

    /** Like {@link #loadServices(String)}, but performs the initial load synchronously. */
    public List<EndpointDefinition> loadServicesNow(final String serviceName) {
        if (trace) {
            logger.trace("ServiceDiscoveryImpl::loadServicesNow() {}", serviceName);
        }
        ServicePool servicePool = servicePoolMap.get(serviceName);
        if (servicePool == null) {
            servicePool = new ServicePool(serviceName, this.servicePoolListener);
            final ServicePool existing = servicePoolMap.putIfAbsent(serviceName, servicePool);
            if (existing != null) {
                // Another thread created the pool first; use theirs.
                servicePool = existing;
            } else {
                try {
                    final List<EndpointDefinition> healthyServices = provider.loadServices(serviceName);
                    servicePool.setHealthyNodes(healthyServices, this.servicePoolListener);
                } catch (Exception ex) {
                    logger.warn("Unable to load healthy nodes from primary service discovery provider", ex);
                    if (backupProvider == null) {
                        // Previously this dereferenced the null backup provider and threw an NPE
                        // that masked the real cause.
                        throw new IllegalStateException(
                                "Primary service discovery provider failed and no backup provider is configured", ex);
                    }
                    final List<EndpointDefinition> healthyServices = backupProvider.loadServices(serviceName);
                    servicePool.setHealthyNodes(healthyServices, this.servicePoolListener);
                }
                watch(serviceName);
            }
        }
        return servicePool.services();
    }

    @Override
    public void start() {
        if (debug) {
            logger.debug("Starting Service Discovery " + provider);
        }
        this.periodicScheduler.repeat(() -> {
            try {
                // Was "Starting Consul monitor": this class is provider agnostic.
                logger.info("Starting service discovery monitor");
                monitor();
            } catch (Exception e) {
                logger.error("ServiceDiscoveryImpl::" +
                        "Error while running monitor", e);
            }
        }, pollForServicesIntervalMS, TimeUnit.MILLISECONDS);
    }

    /**
     * Drains the work queues until {@link #stop()} is called: registers queued endpoints,
     * schedules service pool loads, flushes check-ins, and periodically re-enqueues every
     * watched service so its pool gets refreshed.
     */
    public void monitor() throws Exception {
        while (!stop.get()) {
            if (registerQueue.size() > 0) {
                provider.registerServices(registerQueue);
            }
            if (doneQueue.size() > 0) {
                executorService.submit(() -> {
                    /* There is no rush, we are periodically checking in.
                     * Protect the service registry from too aggressive config. */
                    loadHealthyServices();
                });
            }
            if (checkInsQueue.size() > 0) {
                provider.checkIn(checkInsQueue);
            }
            if (registerQueue.size() == 0) {
                Sys.sleep(pollForServicesIntervalMS);
            }
            if (doneQueue.size() == 0) {
                // Re-enqueue all watched services once per check-in interval.
                long now = Timer.timer().now();
                long duration = now - lastCheckIn;
                if (duration > checkInIntervalInMS) {
                    lastCheckIn = now;
                    doneQueue.addAll(serviceNames);
                }
            }
        }
    }

    /** Iterate through the health service queue and load the services. */
    private void loadHealthyServices() {
        try {
            String serviceName = doneQueue.poll();
            while (serviceName != null) {
                final String serviceNameToFetch = serviceName;
                /* Don't load the service if it is already being loaded.
                 * add() returns false when the name is already present, making the
                 * test-and-set atomic across concurrently running invocations. */
                if (serviceNamesBeingLoaded.add(serviceNameToFetch)) {
                    executorService.submit(() -> {
                        /*
                        Loading a service pool might take a while so
                        the actual load operation happens in its own thread.
                        */
                        doLoadHealthServices(serviceNameToFetch);
                    });
                }
                serviceName = doneQueue.poll();
            }
        } catch (Exception ex) {
            // Was ex.printStackTrace(): route failures through the logger instead.
            logger.error("ServiceDiscoveryImpl::loadHealthyServices failed", ex);
        }
    }

    /**
     * Loads the service from the remote service registry (i.e., consul).
     * @param serviceNameToFetch service that we are loading a pool for.
     */
    private void doLoadHealthServices(final String serviceNameToFetch) {
        try {
            final List<EndpointDefinition> healthyServices = provider.loadServices(serviceNameToFetch);
            populateServiceMap(serviceNameToFetch, healthyServices);
        } catch (Exception ex) {
            doFailOverHealthServicesLoad(serviceNameToFetch, ex);
        } finally {
            /* Remove the service from the serviceNamesBeingLoaded
            SET and add it back to the work pool
            to get loaded again.
            We are constantly loading services through long polling for changes.
            */
            serviceNamesBeingLoaded.remove(serviceNameToFetch);
        }
    }

    /**
     * If the primary load failed, we could have a backup provider registered.
     * @param serviceNameToFetch service pool to fetch
     * @param ex failure raised by the primary provider
     */
    private void doFailOverHealthServicesLoad(final String serviceNameToFetch, Exception ex) {
        /* If there is a backup provider, load from there. */
        if (backupProvider != null) {
            if (debug) logger.debug("ServiceDiscoveryImpl::loadHealthyServices " +
                    "Error while loading healthy" +
                    " services for " + serviceNameToFetch, ex);
            final List<EndpointDefinition> healthyServices = backupProvider.loadServices(serviceNameToFetch);
            populateServiceMap(serviceNameToFetch, healthyServices);
            serviceNamesBeingLoaded.remove(serviceNameToFetch);
        } else {
            logger.error("ServiceDiscoveryImpl::loadHealthyServices " +
                    "Error while loading healthy" +
                    " services for " + serviceNameToFetch, ex);
        }
        Sys.sleep(10_000); //primary is down so slow it down so we don't flow the system with updates of service pools.
    }

    /**
     * Populate the service map.
     * Look up the service pool.
     * Apply the healthy services so the pool can see if there were changes (additions, removal, etc.)
     * @param serviceName service name
     * @param healthyServices list of healthy services that we just loaded.
     */
    private void populateServiceMap(final String serviceName,
                                    final List<EndpointDefinition> healthyServices) {
        final ServicePool servicePool = servicePool(serviceName);
        /* If there were changes then send a service pool change event on the event channel. */
        if (servicePool.setHealthyNodes(healthyServices)) {
            serviceChangedEventChannel.servicePoolChanged(serviceName);
            serviceChangedEventChannel.flushEvents();
        }
    }

    /** Unregisters local endpoints and halts the monitor loop and the scheduler. */
    @Override
    public void stop() {
        if (debug) {
            logger.debug("Stopping Service Discovery");
        }
        provider.unregisterServices(endpointDefinitions);
        this.periodicScheduler.stop();
        this.stop.set(true);
    }

    /** Endpoints registered through this instance. Note: returns the live internal set. */
    public Set<EndpointDefinition> localDefinitions() {
        return endpointDefinitions;
    }
}
| |
/*
* ******************************************************************************
* Copyright (c) 2013 Roman Nurik,2013-2014 Gabriele Mariotti.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* *****************************************************************************
*/
package it.gmariotti.cardslib.library.view.listener;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ValueAnimator;
import android.view.MotionEvent;
import android.view.VelocityTracker;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import it.gmariotti.cardslib.library.R;
import it.gmariotti.cardslib.library.internal.Card;
import it.gmariotti.cardslib.library.view.base.CardViewWrapper;
/**
* It is based on Roman Nurik code.
* See this link for original code https://github.com/romannurik/Android-SwipeToDismiss
* <p/>
* </p>
* It provides a SwipeDismissViewTouchListener for a single Card.
* </p>
* If you are using a list, see {@link SwipeDismissListViewTouchListener}
* </p>
*
* @author Gabriele Mariotti (gabri.mariotti@gmail.com)
*/
public class SwipeDismissViewTouchListener implements View.OnTouchListener {
// Cached ViewConfiguration and system-wide constant values
private int mSlop;
private int mMinFlingVelocity;
private int mMaxFlingVelocity;
private long mAnimationTime;
// Fixed properties
private CardViewWrapper mCardView;
private DismissCallbacks mCallbacks;
private int mViewWidth = 1; // 1 and not 0 to prevent dividing by zero
// Transient properties (reset after every gesture)
private float mDownX;
private float mDownY;
private Card mToken;
private boolean mSwiping;
private int mSwipingSlop;
private VelocityTracker mVelocityTracker;
private boolean mPaused;
private float mTranslationX;
// The card dismisses on release once it is dragged mViewWidth / swipeDistanceDivisor pixels;
// overwritten from resources in the constructor.
private int swipeDistanceDivisor = 2;
/**
 * Constructs a new swipe-to-dismiss touch listener for the given view.
 *
 * @param cardView The card view which should be dismissable.
 * @param card The card model object handed back through {@link DismissCallbacks}.
 * @param callbacks The callback to trigger when the user has indicated that she
 * would like to dismiss this card.
 */
public SwipeDismissViewTouchListener(CardViewWrapper cardView,
Card card,
DismissCallbacks callbacks) {
ViewConfiguration vc = ViewConfiguration.get(cardView.getContext());
mSlop = vc.getScaledTouchSlop();
// NOTE(review): scaled x16, presumably so weak flings do not trigger a dismiss — confirm intent
mMinFlingVelocity = vc.getScaledMinimumFlingVelocity() * 16;
mMaxFlingVelocity = vc.getScaledMaximumFlingVelocity();
mAnimationTime = cardView.getContext().getResources()
.getInteger(android.R.integer.config_shortAnimTime);
mCardView = cardView;
mToken = card;
mCallbacks = callbacks;
swipeDistanceDivisor = cardView.getContext().getResources().getInteger(R.integer.list_card_swipe_distance_divisor);
}
/**
 * Enables or disables (pauses or resumes) watching for swipe-to-dismiss
 * gestures.
 *
 * @param enabled Whether or not to watch for gestures.
 */
public void setEnabled(boolean enabled) {
mPaused = !enabled;
}
/**
 * Handles the drag gesture: ACTION_DOWN starts velocity tracking, ACTION_MOVE translates and
 * fades the card once the touch slop is exceeded, and ACTION_UP either animates the card off
 * screen (dismiss) or snaps it back, depending on drag distance and fling velocity.
 */
@Override
public boolean onTouch(View view, MotionEvent motionEvent) {
// offset because the view is translated during swipe
motionEvent.offsetLocation(mTranslationX, 0);
if (mViewWidth < 2) {
mViewWidth = ((View) mCardView).getWidth();
}
switch (motionEvent.getActionMasked()) {
case MotionEvent.ACTION_DOWN: {
if (mPaused) {
return false;
}
// TODO: ensure this is a finger, and set a flag
mDownX = motionEvent.getRawX();
mDownY = motionEvent.getRawY();
if (mCallbacks.canDismiss(mToken)) {
mVelocityTracker = VelocityTracker.obtain();
mVelocityTracker.addMovement(motionEvent);
}
view.onTouchEvent(motionEvent);
return true;
//return false; fixing swipe and click together
}
case MotionEvent.ACTION_UP: {
if (mVelocityTracker == null) {
break;
}
float deltaX = motionEvent.getRawX() - mDownX;
mVelocityTracker.addMovement(motionEvent);
mVelocityTracker.computeCurrentVelocity(1000);
float velocityX = mVelocityTracker.getXVelocity();
float absVelocityX = Math.abs(velocityX);
float absVelocityY = Math.abs(mVelocityTracker.getYVelocity());
boolean dismiss = false;
boolean dismissRight = false;
// Dismiss either on a long enough drag, or on a mostly-horizontal fling.
if (Math.abs(deltaX) > mViewWidth / swipeDistanceDivisor && mSwiping) {
dismiss = true;
dismissRight = deltaX > 0;
} else if (mMinFlingVelocity <= absVelocityX
&& absVelocityX <= mMaxFlingVelocity
&& absVelocityY < absVelocityX && mSwiping) {
// dismiss only if flinging in the same direction as dragging
dismiss = (velocityX < 0) == (deltaX < 0);
dismissRight = mVelocityTracker.getXVelocity() > 0;
}
if (dismiss) {
// dismiss
((View) mCardView).animate()
.translationX(dismissRight ? mViewWidth : -mViewWidth)
.alpha(0).setDuration(mAnimationTime)
.setListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
performDismiss();
}
});
} else if (mSwiping) {
// cancel: slide the card back into place
((View) mCardView).animate().translationX(0).alpha(1)
.setDuration(mAnimationTime).setListener(null);
}
// Reset the transient gesture state.
mVelocityTracker.recycle();
mVelocityTracker = null;
mTranslationX = 0;
mDownX = 0;
mDownY = 0;
mSwiping = false;
break;
}
case MotionEvent.ACTION_CANCEL: {
if (mVelocityTracker == null) {
break;
}
// Gesture aborted: animate back to the resting position and reset state.
((View) mCardView).animate()
.translationX(0)
.alpha(1)
.setDuration(mAnimationTime)
.setListener(null);
mVelocityTracker.recycle();
mVelocityTracker = null;
mTranslationX = 0;
mDownX = 0;
mDownY = 0;
mSwiping = false;
break;
}
case MotionEvent.ACTION_MOVE: {
if (mVelocityTracker == null || mPaused) {
break;
}
mVelocityTracker.addMovement(motionEvent);
float deltaX = motionEvent.getRawX() - mDownX;
float deltaY = motionEvent.getRawY() - mDownY;
// Start swiping once the horizontal movement exceeds the touch slop and clearly
// dominates the vertical movement.
if (Math.abs(deltaX) > mSlop && Math.abs(deltaY) < Math.abs(deltaX) / 2) {
mSwiping = true;
((View) mCardView).getParent().requestDisallowInterceptTouchEvent(true);
mSwipingSlop = (deltaX > 0 ? mSlop : -mSlop);
// Cancel ListView's touch (un-highlighting the item)
MotionEvent cancelEvent = MotionEvent.obtain(motionEvent);
cancelEvent
.setAction(MotionEvent.ACTION_CANCEL
| (motionEvent.getActionIndex() << MotionEvent.ACTION_POINTER_INDEX_SHIFT));
((View) mCardView).onTouchEvent(cancelEvent);
cancelEvent.recycle();
}
if (mSwiping) {
mTranslationX = deltaX;
//((View)mCardView).setTranslationX(deltaX);
((View) mCardView).setTranslationX(deltaX - mSwipingSlop);
// Fade out proportionally to how far the card has been dragged.
((View) mCardView).setAlpha(Math.max(0f,
Math.min(1f, 1f - 2f * Math.abs(deltaX) / mViewWidth)));
return true;
}
break;
}
}
return false;
}
/**
 * Collapses the dismissed card to (near) zero height, then fires the dismiss callback and
 * restores the view's alpha, translation and height so the view can be reused.
 */
private void performDismiss() {
// Animate the dismissed view to zero-height and then fire the dismiss callback.
// This triggers layout on each animation frame; in the future we may want to do something
// smarter and more performant.
final ViewGroup.LayoutParams lp = ((View) mCardView).getLayoutParams();
final int originalHeight = ((View) mCardView).getHeight();
ValueAnimator animator = ValueAnimator.ofInt(originalHeight, 1)
.setDuration(mAnimationTime);
animator.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
mCallbacks.onDismiss(mCardView, mToken);
// Reset view presentation
((View) mCardView).setAlpha(1f);
((View) mCardView).setTranslationX(0);
//ViewGroup.LayoutParams lp = mCardView.getLayoutParams();
lp.height = originalHeight;
((View) mCardView).setLayoutParams(lp);
}
});
animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator valueAnimator) {
lp.height = (Integer) valueAnimator.getAnimatedValue();
((View) mCardView).setLayoutParams(lp);
}
});
animator.start();
}
/**
 * The callback interface used by {@link SwipeDismissViewTouchListener}
 * to inform its client about a successful dismissal of the view for which it was created.
 */
public interface DismissCallbacks {
/**
 * Called to determine whether the given position can be dismissed.
 */
boolean canDismiss(Card card);
/**
 * Called when the user has indicated that she would like to dismiss the view.
 *
 * @param cardView The originating {@link it.gmariotti.cardslib.library.view.CardView}.
 * @param card The dismissed card.
 */
void onDismiss(CardViewWrapper cardView, Card card);
}
}
| |
/*
* Copyright (c) 2022, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.alg.structure;
import boofcv.alg.structure.SceneWorkingGraph.View;
import boofcv.misc.BoofMiscOps;
import lombok.Getter;
import org.ddogleg.sorting.QuickSort_F64;
import org.ddogleg.struct.DogArray;
import org.ddogleg.struct.DogArray_F64;
import org.ddogleg.struct.VerbosePrint;
import org.jetbrains.annotations.Nullable;
import java.io.PrintStream;
import java.util.*;
import static boofcv.misc.BoofMiscOps.checkTrue;
/**
* Selects a subset of views from a {@link SceneWorkingGraph} as the first step before performing local bundle
* adjustment. The goal is to select the subset of views which would contribute the most to the target view's
* state estimate. To keep computational limits in check the user needs to specifies a maximum number of views.
*
* Every connection between views is assigned a score. The goal is to find the subset of views which maximizes
* the minimum score across all connections between views. This is approximated using the greedy algorithm below:
*
* Summary:
* <ol>
* <li>Set of views will be the target's neighbors and their neighbors</li>
* <li>Create a list of all edges and their scores that connect these</li>
* <li>Repeat the loop below until only N views remain</li>
* <ol>
* <li>Select the edge with the lowest score</li>
* <li>Pick one of the views it is connected to be prune based on their connections</li>
* <li>Remove the view and all the edges connected to it</li>
* </ol>
* </ol>
*
* Care is taken so that not all the direct neighbors are removed by setting a {@link #minNeighbors minimum number}
* that must remain at the end. If all neighbors are removed then there is the possibility that all the remaining
* nodes will not see the same features as the target.
*
* <p>
* WARNING: There is a known flaw where a multiple disconnected graphs can be created in a poorly connected graph<br>
* </p>
*
* @author Peter Abeles
*/
public class SelectNeighborsAroundView implements VerbosePrint {
/** Maximum number of views in the constructed working graph. */
public int maxViews = 10;
/** When a view is scored for removal the N-worstOfTop connection is used for the score. */
public int worstOfTop = 3;
/** There should be at least this number of direct neighbors in the final list */
public int minNeighbors = 2;
/** Copy of the local scene which can be independently optimized */
protected @Getter final SceneWorkingGraph localWorking = new SceneWorkingGraph();
// Destination for diagnostic output; null disables verbose printing
private @Nullable PrintStream verbose;
//-------------- Internal Working Space
// Keeps track of how many direct connections to the target remain
int remainingNeighbors;
// List of all views that it will consider for inclusion in the sub-graph
List<View> candidates = new ArrayList<>();
// Fast look up of candidates
Map<String, View> lookup = new HashMap<>();
// List of all edges with scores
DogArray<EdgeScore> edges = new DogArray<>(EdgeScore::new);
// indicates if a view has been included in an inlier set
Set<String> hasFeatures = new HashSet<>();
// storage for the score of connected edges
DogArray_F64 connScores = new DogArray_F64();
// NOTE(review): presumably used to sort connScores when ranking views — usage is outside this excerpt
QuickSort_F64 sorter = new QuickSort_F64();
/**
 * Computes a local graph around the given target view. Retrieve the result with
 * {@link #getLocalWorking()}.
 *
 * @param target (Input) The view in 'working' that a local graph is to be built around
 * @param working (Input) A graph of the entire scene that a sub graph is to be made from
 */
public void process( View target, SceneWorkingGraph working ) {
	initialize();

	// 1) candidate views: the target's neighbors and their neighbors
	addNeighbors2(target, working);
	// 2) cut the candidate set down to the configured maximum
	pruneViews(target);
	// 3) build the independently optimizable local graph from what remains
	createLocalGraph(target, working);
}
void initialize() {
	// Clear all per-call working state so process() can be invoked repeatedly.
	candidates.clear();
	lookup.clear();
	edges.reset();
	localWorking.reset();
}
/**
 * Adds the seed view's neighbors, and their neighbors, to the list of candidate views.
 *
 * @param seed view the local graph is being built around
 * @param working graph of the entire scene
 */
void addNeighbors2( View seed, SceneWorkingGraph working ) {
// put the target into the lookup list to avoid double counting. Target isn't a candidate since it's mandatory
lookup.put(seed.pview.id, seed);
// Add immediate neighbors to candidate list
for (int connIdx = 0; connIdx < seed.pview.connections.size; connIdx++) {
PairwiseImageGraph.Motion m = seed.pview.connections.get(connIdx);
View o = working.views.get(m.other(seed.pview).id);
// No corresponding working view
if (o == null)
continue;
addCandidateView(o);
}
// Note the number of neighbors since we will keep track of how many are left in the candidates list
remainingNeighbors = candidates.size();
// Add the neighbor's neighbors to the candidate list.
// Iterate in reverse so views appended by addCandidateView() below are never revisited.
for (int candIdx = candidates.size() - 1; candIdx >= 0; candIdx--) {
View c = candidates.get(candIdx);
for (int connIdx = 0; connIdx < c.pview.connections.size; connIdx++) {
PairwiseImageGraph.Motion m = c.pview.connections.get(connIdx);
View o = working.views.get(m.other(c.pview).id);
// No corresponding working view
if (o == null)
continue;
// don't add if it's already in there
if (lookup.containsKey(o.pview.id))
continue;
addCandidateView(o);
}
}
// Now that all the nodes have been added we can create the edges
addEdgesOf(working, seed);
for (int candIdx = 0; candIdx < candidates.size(); candIdx++) {
View c = candidates.get(candIdx);
addEdgesOf(working, c);
}
}
void addCandidateView( View o ) {
	// Track the view in both the id-based lookup table and the ordered candidate list.
	lookup.put(o.pview.id, o);
	candidates.add(o);
}
/**
 * Adds every connection of view 'c' that points at another candidate (or the target),
 * taking care not to insert the same edge twice.
 */
private void addEdgesOf( SceneWorkingGraph working, View c ) {
	for (int connIdx = 0; connIdx < c.pview.connections.size; connIdx++) {
		PairwiseImageGraph.Motion m = c.pview.connections.get(connIdx);
		View o = working.views.get(m.other(c.pview).id);
		if (o == null)
			continue; // connection leads outside the working graph
		if (!lookup.containsKey(o.pview.id))
			continue; // not pointing to a candidate or the target
		// An undirected edge is only added from the lexicographically smaller id, which
		// prevents the same edge from being added twice
		if (c.pview.id.compareTo(o.pview.id) >= 0)
			continue;
		addEdge(m);
	}
}
/**
 * Reduces the candidate size until the requested maximum number of views has been met.
 *
 * <p>Repeatedly removes the candidate touching the lowest scoring 3D edge, while
 * guaranteeing that at least 'minNeighbors' direct neighbors of the seed survive.
 * The seed itself is never removed.</p>
 *
 * @param seed Target view the local graph is being built around
 * @throws IllegalArgumentException if 'minNeighbors' is so large relative to 'maxViews'
 *         that pruning can never reach the requested size
 */
void pruneViews( View seed ) {
    if (verbose != null) verbose.println("ENTER pruneViews target=" + seed.pview.id);
    // maxViews-1 because the target is not in the candidates list
    while (candidates.size() > maxViews - 1) {
        // Search for the edge with the lowest score. The reason we don't just sort once and be done with it
        // is that each time we remove an element from the list that's an O(N) operation or remove swap O(1)
        // but need to re-sort it
        int lowestIndex = -1;
        double lowestScore = Double.MAX_VALUE;
        for (int i = 0; i < edges.size; i++) {
            EdgeScore s = edges.get(i);
            if (s.m.score3D >= lowestScore)
                continue;
            // See if too many neighbors have been removed and that it should keep the remaining
            if (remainingNeighbors <= minNeighbors && s.m.isConnected(seed.pview))
                continue;
            // Check here if removing the node would cause a split. If so don't mark it as the lowest
            lowestScore = s.m.score3D;
            lowestIndex = i;
        }
        if (lowestIndex < 0) {
            // it can fail to find a solution if removing any candidate would remove too many neighbors
            // it's also possible it was miss configured
            // BUG FIX: this guard was inverted (it previously threw when
            // minNeighbors < maxViews - 1, i.e. in the VALID configuration, and
            // silently returned in the impossible one). The exception message itself
            // describes the bad case as 'minNeighbors' >= 'maxViews-1': keeping that
            // many neighbors can never be reconciled with the candidate budget.
            if (minNeighbors >= maxViews - 1) {
                throw new IllegalArgumentException("Miss configured: 'minNeighbors' >= 'maxViews-1'");
            }
            return;
        }
        if (verbose != null)
            verbose.println("Candidates.size=" + candidates.size() + " Pruning score=" + lowestScore +
                    " " + edges.get(lowestIndex).m);
        PairwiseImageGraph.Motion m = edges.get(lowestIndex).m;
        if (m.isConnected(seed.pview)) {
            // Remove a neighbor of the target. No need to select which one to remove since the
            // target can't be removed
            removeCandidateNode(m.other(seed.pview).id, seed);
            if (verbose != null)
                verbose.println("Connects to target. No need to score. neighbors=" + remainingNeighbors);
        } else {
            // be careful to not remove a view which links to the seed if it's below the limit
            boolean touchSeedSrc = remainingNeighbors <= minNeighbors && m.src.findMotion(seed.pview) != null;
            boolean touchSeedDst = remainingNeighbors <= minNeighbors && m.dst.findMotion(seed.pview) != null;
            double scoreSrc, scoreDst;
            if (touchSeedSrc == touchSeedDst) {
                // they either both touch or neither touch, either way the score should be the tie breaker
                scoreSrc = scoreForRemoval(m.src, m);
                scoreDst = scoreForRemoval(m.dst, m);
            } else {
                // Prefer to remove the seed which does not touch the seed
                scoreSrc = touchSeedSrc ? Double.MAX_VALUE : scoreForRemoval(m.src, m);
                scoreDst = touchSeedDst ? Double.MAX_VALUE : scoreForRemoval(m.dst, m);
            }
            if (verbose != null) verbose.println("Scores: src=" + scoreSrc + " dst=" + scoreDst);
            // Remove whichever endpoint scores lower (i.e. is less valuable to keep)
            removeCandidateNode(((scoreSrc < scoreDst) ? m.src : m.dst).id, seed);
        }
        // WARNING: This should be made a bit less naive by considering if removing a View would cause other
        // Views to have no path to the target. This could probably be done efficiently by saving a reference
        // towards the target view
        // WARNING: There is nothing stopping it from pruning all of the target's neighbors also!
    }
}
/**
 * Score the quality of a View based on the worst score of its top N connections.
 * Only connections to views still inside the local candidate set are considered.
 *
 * @param v The view being considered for removal
 * @param ignore skips this motion
 * @return score; 0.0 when the view has no other connections inside the candidate set
 */
double scoreForRemoval( PairwiseImageGraph.View v, PairwiseImageGraph.Motion ignore ) {
    connScores.reset();
    // Gather the 3D scores of every connection into the candidate set, except 'ignore'
    for (int connIdx = 0; connIdx < v.connections.size; connIdx++) {
        PairwiseImageGraph.Motion m = v.connections.get(connIdx);
        if (m == ignore)
            continue;
        String o = m.other(v).id;
        if (!lookup.containsKey(o)) {
            continue;
        }
        connScores.add(m.score3D);
    }
    if (connScores.size == 0)
        return 0.0;
    // Find the N-worstOfTop best hypothesis
    connScores.sort(sorter);
    // Clamp the index so a view with fewer than 'worstOfTop' connections still scores
    int idx = Math.max(0, connScores.size - worstOfTop);
    return connScores.get(idx);
}
/**
 * Removes the specified view from the candidate list and then searches for all of its
 * edges in the edge list and removes those. Any candidate left with no connections
 * into the local set (an orphan) is removed from the candidate/lookup structures too.
 *
 * @param id View id that is to be removed from candidate list
 * @param seed Target view; used to maintain the remaining-neighbor count
 */
void removeCandidateNode( String id, View seed ) {
    if (verbose != null) verbose.println("Removing candidate view='" + id + "'");
    // Remove the specified node from the candidate list data structures
    View v = lookup.remove(id);
    BoofMiscOps.checkTrue(candidates.remove(v));
    // Keep track of how many direct neighbors to the target have been removed
    if (null != v.pview.findMotion(seed.pview)) {
        remainingNeighbors--;
        if (verbose != null)
            verbose.println("Neighbor of seed has been removed. view='" + id + "' remaining=" + remainingNeighbors);
    }
    // Remove all edges that point to this view in the edge list
    for (int connIdx = 0; connIdx < v.pview.connections.size; connIdx++) {
        PairwiseImageGraph.Motion m = v.pview.connections.get(connIdx);
        View o = lookup.get(m.other(v.pview).id);
        // Connection leads outside the local set; no edge was recorded for it
        if (o == null)
            continue;
        // If by removing the target one of it's connections is now an orphan remove that note
        // TODO see comment about graph splits. This should be handled by that logic and this removed
        if (isOrphan(o)) {
            if (verbose != null) verbose.println("Removing orphaned view='" + o.pview.id + "'");
            checkTrue(null != lookup.remove(o.pview.id), "Not in lookup list");
            checkTrue(candidates.remove(o), "Can't remove. Not in candidate list");
        }
        // Every in-set connection must have a matching recorded edge; remove-swap is O(1)
        boolean found = false;
        for (int i = 0; i < edges.size; i++) {
            if (edges.get(i).m != m)
                continue;
            edges.removeSwap(i);
            found = true;
            break;
        }
        checkTrue(found, "No matching edge found. BUG. id='" + id + "' m.other='" + o.pview.id + "'");
    }
}
/**
 * Checks to see if the View has any connections to another candidate.
 *
 * @return true when none of v's connections lead to a view still in the local set
 */
boolean isOrphan( View v ) {
    boolean connected = false;
    for (int i = 0; i < v.pview.connections.size && !connected; i++) {
        PairwiseImageGraph.Motion motion = v.pview.connections.get(i);
        connected = lookup.containsKey(motion.other(v.pview).id);
    }
    return !connected;
}
/**
 * Create a local graph from all the candidate Views.
 *
 * Every candidate (and the seed) is copied into {@code localWorking}. A copied view
 * that ended up with no inlier features of its own borrows an inlier list from the
 * first connected neighbor that references it.
 *
 * @param seed Target view from original graph that the local graph is made around
 * @param working The original graph
 */
void createLocalGraph( View seed, SceneWorkingGraph working ) {
    hasFeatures.clear();
    addViewToLocal(working, seed);
    for (int i = 0; i < candidates.size(); i++) {
        addViewToLocal(working, candidates.get(i));
    }
    // For views with no features look at its neighbors to see if it's an inlier then add those
    for (int i = 0; i < candidates.size(); i++) {
        // NOTE: despite the name, 'origView' is the copy inside the LOCAL graph
        View origView = localWorking.lookupView(candidates.get(i).pview.id);
        if (hasFeatures.contains(origView.pview.id))
            continue;
        SceneWorkingGraph.InlierInfo origInlier = origView.inliers.grow();
        // Search for a connected view which has this view inside its inlier list. There has to be
        // at least one
        connectionsLoop:
        for (int connIdx = 0; connIdx < origView.pview.connections.size; connIdx++) {
            PairwiseImageGraph.Motion m = origView.pview.connections.get(connIdx);
            View v = working.views.get(m.other(origView.pview).id);
            // skip if there isn't a known metric upgrade yet
            if (v == null)
                continue;
            for (int infoIdx = 0; infoIdx < v.inliers.size; infoIdx++) {
                SceneWorkingGraph.InlierInfo otherInlier = v.inliers.get(infoIdx);
                for (int j = 0; j < otherInlier.views.size; j++) {
                    if (!otherInlier.views.get(j).id.equals(origView.pview.id))
                        continue;
                    // Create an inlier list
                    origInlier.views.add(origView.pview);
                    origInlier.observations.grow().setTo(otherInlier.observations.get(j));
                    // Use the first inlier list that we found
                    break;
                }
                // Found one. Now it's done
                if (!origInlier.isEmpty())
                    break connectionsLoop;
            }
        }
        checkTrue(!origInlier.isEmpty(), "BUG! there can be no estimated state if it was never in an " +
                "inlier list of a neighbor. view.id=" + origView.pview.id);
    }
}
/**
 * Adds the view in the original graph to the new local one while copying over all the useful information:
 * the camera it references, its world-to-view transform, and any inlier sets restricted to
 * views that are inside the local sub graph.
 *
 * @param origScene Graph the view is being copied from
 * @param origView View in the original graph that was passed in
 */
void addViewToLocal( SceneWorkingGraph origScene, View origView ) {
    // Add the camera it references if it has not already been added
    SceneWorkingGraph.Camera origCamera = origScene.getViewCamera(origView);
    SceneWorkingGraph.Camera localCamera = localWorking.cameras.get(origCamera.indexDB);
    if (localCamera == null) {
        localCamera = localWorking.addCameraCopy(origCamera);
    }
    // Create the local node
    View localView = localWorking.addView(origView.pview, localCamera);
    // copy geometric information over
    localView.world_to_view.setTo(origView.world_to_view);
    for (int infoIdx = 0; infoIdx < origView.inliers.size; infoIdx++) {
        SceneWorkingGraph.InlierInfo origInliers = origView.inliers.get(infoIdx);
        SceneWorkingGraph.InlierInfo localInliers = localView.inliers.grow();
        // Copy over inliers that are in the sub graph
        for (int i = 0; i < origInliers.views.size; i++) {
            PairwiseImageGraph.View pview = origInliers.views.get(i);
            // see if it's in the sub graph
            if (!lookup.containsKey(pview.id))
                continue;
            // mark this as having features assigned to it
            hasFeatures.add(pview.id);
            // Add the inliers to the local view
            localInliers.views.add(pview);
            localInliers.observations.grow().setTo(origInliers.observations.get(i));
        }
    }
}
/** Appends a new scored-edge record referencing the given motion. */
private void addEdge( PairwiseImageGraph.Motion m ) {
    edges.grow().m = m;
}
/**
 * Turns verbose logging on/off. The configuration set is currently ignored.
 */
@Override
public void setVerbose( @Nullable PrintStream out, @Nullable Set<String> configuration ) {
    this.verbose = out;
}
// TODO remove this
/** Wrapper that lets a motion live inside a growable edge list for score-based pruning. */
@SuppressWarnings({"NullAway.Init"})
static class EdgeScore {
    // The motion this edge references
    PairwiseImageGraph.Motion m;
}
}
| |
/*L
* Copyright RTI International
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/webgenome/LICENSE.txt for details.
*/
/*
$Revision: 1.4 $
$Date: 2007-09-06 16:48:10 $
*/
package org.rti.webgenome.service.plot;
import java.util.Collection;
import java.util.Set;
import org.rti.webgenome.core.Constants;
import org.rti.webgenome.domain.Experiment;
import org.rti.webgenome.domain.GenomeInterval;
/**
 * Parameters that are relevant to plots where data values
 * are represented by a color coding system.
 * @author dhall
 *
 */
public abstract class HeatMapPlotParameters extends BaseGenomicPlotParameters {
    //
    // STATICS
    //
    /**
     * Default minimum color saturation value.
     * Data values
     * less than or equal to <code>minSaturation</code>
     * will be mapped to
     * pure green (#00FF00) in the plot.
     */
    public static final float DEF_MIN_SATURATION = (float) -1.0;
    /**
     * Default maximum color saturation value.
     * Data values
     * greater than or equal to <code>maxSaturation</code>
     * will be mapped to pure red
     * (#FF0000) in the plot.
     */
    public static final float DEF_MAX_SATURATION = (float) 1.0;
    /**
     * Default minimum mask value. Values between
     * <code>minMask</code> and <code>maxMask</code>
     * are filtered out of plot.
     * NOTE(review): the default minimum is BIG_FLOAT while the default maximum
     * is SMALL_FLOAT, i.e. min > max — presumably intentional so that the mask
     * interval is empty (masking disabled) by default; confirm.
     */
    public static final float DEF_MIN_MASK = Constants.BIG_FLOAT;
    /**
     * Default maximum mask value.
     * Values between
     * <code>minMask</code> and <code>maxMask</code>
     * are filtered out of plot.
     */
    public static final float DEF_MAX_MASK = Constants.SMALL_FLOAT;
    //
    // ATTRIBUTES
    //
    /**
     * Maximum color saturation value for expression data. Data values
     * greater than or equal to this will be mapped to pure red
     * (#FF0000) in the plot.
     */
    private float expressionMaxSaturation = DEF_MAX_SATURATION;
    /**
     * Minimum color saturation value for expression data. Data values
     * less than or equal to this will be mapped to
     * pure green (#00FF00) in the plot.
     */
    private float expressionMinSaturation = DEF_MIN_SATURATION;
    /**
     * Maximum color saturation value for copy number data. Data values
     * greater than or equal to this will be mapped to pure red
     * (#FF0000) in the plot.
     */
    private float copyNumberMaxSaturation = DEF_MAX_SATURATION;
    /**
     * Minimum color saturation value for copy number data. Data values
     * less than or equal to this will be mapped to
     * pure green (#00FF00) in the plot.
     */
    private float copyNumberMinSaturation = DEF_MIN_SATURATION;
    /**
     * Minimum mask value. Values between
     * <code>minMask</code> and <code>maxMask</code>
     * are filtered out of plot.
     */
    private float minMask = DEF_MIN_MASK;
    /**
     * Maximum mask value. Values between
     * <code>minMask</code> and <code>maxMask</code>
     * are filtered out of plot.
     */
    private float maxMask = DEF_MAX_MASK;
    //
    // GETTERS/SETTERS
    //
    /**
     * Get maximum mask. Values between
     * <code>minMask</code> and <code>maxMask</code>
     * are filtered out of plot.
     * @return Maximum mask value
     */
    public final float getMaxMask() {
        return maxMask;
    }
    /**
     * Set maximum mask. Values between
     * <code>minMask</code> and <code>maxMask</code>
     * are filtered out of plot.
     * @param maxMask Maximum mask
     */
    public final void setMaxMask(final float maxMask) {
        this.maxMask = maxMask;
    }
    /**
     * Get minimum mask. Values between
     * <code>minMask</code> and <code>maxMask</code>
     * are filtered out of plot.
     * @return Minimum mask
     */
    public final float getMinMask() {
        return minMask;
    }
    /**
     * Set minimum mask. Values between
     * <code>minMask</code> and <code>maxMask</code>
     * are filtered out of plot.
     * @param minMask Minimum
     */
    public final void setMinMask(final float minMask) {
        this.minMask = minMask;
    }
    /**
     * Get maximum color saturation value for expression data.
     * Data values
     * greater than or equal to this will be mapped to pure red.
     * (#FF0000) in the plot.
     * @return Maximum color saturation value.
     */
    public final float getExpressionMaxSaturation() {
        return expressionMaxSaturation;
    }
    /**
     * Set maximum color saturation value for expression data.
     * Data values
     * greater than or equal to this will be mapped to pure red.
     * @param maxSaturation Maximum color saturation value.
     */
    public final void setExpressionMaxSaturation(final float maxSaturation) {
        this.expressionMaxSaturation = maxSaturation;
    }
    /**
     * Get minimum color saturation value for expression data.
     * Data values
     * less than or equal to this will be mapped to pure green.
     * (#00FF00) in the plot.
     * @return Minimum color saturation value.
     */
    public final float getExpressionMinSaturation() {
        return expressionMinSaturation;
    }
    /**
     * Set minimum color saturation value for expression data.
     * Data values
     * less than or equal to this will be mapped to pure green.
     * (#00FF00) in the plot.
     * @param minSaturation Minimum color saturation value.
     */
    public final void setExpressionMinSaturation(final float minSaturation) {
        this.expressionMinSaturation = minSaturation;
    }
    /**
     * Get maximum color saturation value for copy number data.
     * Data values
     * greater than or equal to this will be mapped to pure red.
     * (#FF0000) in the plot.
     * @return Maximum color saturation value.
     */
    public final float getCopyNumberMaxSaturation() {
        return copyNumberMaxSaturation;
    }
    /**
     * Set maximum color saturation value for copy number data.
     * Data values
     * greater than or equal to this will be mapped to pure red.
     * @param maxSaturation Maximum color saturation value.
     */
    public final void setCopyNumberMaxSaturation(final float maxSaturation) {
        this.copyNumberMaxSaturation = maxSaturation;
    }
    /**
     * Get minimum color saturation value for copy number data.
     * Data values
     * less than or equal to this will be mapped to pure green.
     * (#00FF00) in the plot.
     * @return Minimum color saturation value.
     */
    public final float getCopyNumberMinSaturation() {
        return copyNumberMinSaturation;
    }
    /**
     * Set minimum color saturation value for copy number data.
     * Data values
     * less than or equal to this will be mapped to pure green.
     * (#00FF00) in the plot.
     * @param minSaturation Minimum color saturation value.
     */
    public final void setCopyNumberMinSaturation(final float minSaturation) {
        this.copyNumberMinSaturation = minSaturation;
    }
    //
    // CONSTRUCTORS
    //
    /**
     * Constructor.
     */
    public HeatMapPlotParameters() {
        super();
    }
    /**
     * Constructor that performs deep copy of given parameter.
     * @param params Parameter whose properties will be deep copied.
     */
    public HeatMapPlotParameters(final HeatMapPlotParameters params) {
        super(params);
        this.maxMask = params.maxMask;
        this.copyNumberMaxSaturation = params.copyNumberMaxSaturation;
        this.copyNumberMinSaturation = params.copyNumberMinSaturation;
        this.expressionMaxSaturation = params.expressionMaxSaturation;
        this.expressionMinSaturation = params.expressionMinSaturation;
        this.minMask = params.minMask;
    }
    //
    // BUSINESS METHODS
    //
    /**
     * Derive any attributes not supplied by the user
     * from the given experiments.
     * @param experiments Experiments from which to derive
     * attributes not supplied by user.
     */
    public void deriveMissingAttributes(
            final Collection<Experiment> experiments) {
        super.deriveMissingAttributes(experiments);
        Set<Short> chromosomes = GenomeInterval.getChromosomes(
                this.getGenomeIntervals());
        // NOTE(review): if Constants.FLOAT_NAN is an actual NaN constant, the
        // '== Constants.FLOAT_NAN' comparisons below are always false (NaN is
        // never == to anything) and Float.isNaN() alone suffices; presumably
        // FLOAT_NAN is a sentinel value instead — confirm.
        if (Float.isNaN(this.getExpressionMinSaturation())
                || this.getExpressionMinSaturation() == Constants.FLOAT_NAN) {
            float min = Experiment.findMinExpressionValue(
                    experiments, chromosomes);
            this.setExpressionMinSaturation(min);
        }
        if (Float.isNaN(this.getExpressionMaxSaturation())
                || this.getExpressionMaxSaturation() == Constants.FLOAT_NAN) {
            float max = Experiment.findMaxExpressionValue(
                    experiments, chromosomes);
            this.setExpressionMaxSaturation(max);
        }
        if (Float.isNaN(this.getCopyNumberMinSaturation())
                || this.getCopyNumberMinSaturation() == Constants.FLOAT_NAN) {
            float min = Experiment.findMinCopyNumberValue(
                    experiments, chromosomes);
            this.setCopyNumberMinSaturation(min);
        }
        if (Float.isNaN(this.getCopyNumberMaxSaturation())
                || this.getCopyNumberMaxSaturation() == Constants.FLOAT_NAN) {
            float max = Experiment.findMaxCopyNumberValue(
                    experiments, chromosomes);
            this.setCopyNumberMaxSaturation(max);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.commons.bcel6.verifier.structurals;
import org.apache.commons.bcel6.generic.ReferenceType;
import org.apache.commons.bcel6.generic.Type;
import org.apache.commons.bcel6.verifier.exc.AssertionViolatedException;
import org.apache.commons.bcel6.verifier.exc.StructuralCodeConstraintException;
/**
 * This class implements an array of local variables used for symbolic JVM
 * simulation.
 *
 * @version $Id$
 */
public class LocalVariables implements Cloneable {
    /** The Type[] containing the local variable slots. */
    private final Type[] locals;
    /**
     * Creates a new LocalVariables object. Every slot starts out as Type.UNKNOWN.
     */
    public LocalVariables(int maxLocals){
        locals = new Type[maxLocals];
        for (int i=0; i<maxLocals; i++){
            locals[i] = Type.UNKNOWN;
        }
    }
    /**
     * Returns a deep copy of this object; i.e. the clone
     * operates on a new local variable array.
     * However, the Type objects in the array are shared.
     * NOTE(review): does not call super.clone(); a subclass would get a plain
     * LocalVariables back, not an instance of the subclass.
     */
    @Override
    public LocalVariables clone(){
        LocalVariables lvs = new LocalVariables(locals.length);
        for (int i=0; i<locals.length; i++){
            lvs.locals[i] = this.locals[i];
        }
        return lvs;
    }
    /**
     * Returns the type of the local variable slot i.
     */
    public Type get(int i){
        return locals[i];
    }
    /**
     * Returns a (correctly typed) clone of this object.
     * This is equivalent to ((LocalVariables) this.clone()).
     */
    public LocalVariables getClone(){
        return (LocalVariables) this.clone();
    }
    /**
     * Returns the number of local variable slots this
     * LocalVariables instance has.
     */
    public int maxLocals(){
        return locals.length;
    }
    /**
     * Sets a new Type for the given local variable slot.
     * Sub-int types are rejected: the JVM models byte/short/boolean/char
     * locals as int.
     */
    public void set(int i, Type type){ // TODO could be package-protected?
        if (type == Type.BYTE || type == Type.SHORT || type == Type.BOOLEAN || type == Type.CHAR){
            throw new AssertionViolatedException("LocalVariables do not know about '"+type+"'. Use Type.INT instead.");
        }
        locals[i] = type;
    }
    /** @return a hash code value for the object.
     * Based only on the slot count — weak, but consistent with equals()
     * (equal objects necessarily have equal lengths).
     */
    @Override
    public int hashCode() { return locals.length; }
    /*
     * Fulfills the general contract of Object.equals():
     * two instances are equal iff they have the same number of slots and
     * every slot holds an equal Type.
     */
    @Override
    public boolean equals(Object o){
        if (!(o instanceof LocalVariables)) {
            return false;
        }
        LocalVariables lv = (LocalVariables) o;
        if (this.locals.length != lv.locals.length) {
            return false;
        }
        for (int i=0; i<this.locals.length; i++){
            if (!this.locals[i].equals(lv.locals[i])){
                //System.out.println(this.locals[i]+" is not "+lv.locals[i]);
                return false;
            }
        }
        return true;
    }
    /**
     * Merges two local variables sets as described in the Java Virtual Machine Specification,
     * Second Edition, section 4.9.2, page 146. Mutates this instance slot by slot.
     */
    public void merge(LocalVariables lv){
        if (this.locals.length != lv.locals.length){
            throw new AssertionViolatedException("Merging LocalVariables of different size?!? From different methods or what?!?");
        }
        for (int i=0; i<locals.length; i++){
            merge(lv, i);
        }
    }
    /**
     * Merges a single local variable.
     *
     * @see #merge(LocalVariables)
     */
    private void merge(LocalVariables lv, int i){
        try {
            // We won't accept an unitialized object if we know it was initialized;
            // compare vmspec2, 4.9.4, last paragraph.
            if ( (!(locals[i] instanceof UninitializedObjectType)) && (lv.locals[i] instanceof UninitializedObjectType) ){
                throw new StructuralCodeConstraintException(
                    "Backwards branch with an uninitialized object in the local variables detected.");
            }
            // Even harder, what about _different_ uninitialized object types?!
            if ( (!(locals[i].equals(lv.locals[i]))) && (locals[i] instanceof UninitializedObjectType) &&
                    (lv.locals[i] instanceof UninitializedObjectType) ){
                throw new StructuralCodeConstraintException(
                    "Backwards branch with an uninitialized object in the local variables detected.");
            }
            // If we just didn't know that it was initialized, we have now learned.
            if (locals[i] instanceof UninitializedObjectType){
                if (! (lv.locals[i] instanceof UninitializedObjectType)){
                    locals[i] = ((UninitializedObjectType) locals[i]).getInitialized();
                }
            }
            // Two reference types merge to their first common superclass
            if ((locals[i] instanceof ReferenceType) && (lv.locals[i] instanceof ReferenceType)){
                if (! locals[i].equals(lv.locals[i])){ // needed in case of two UninitializedObjectType instances
                    Type sup = ((ReferenceType) locals[i]).getFirstCommonSuperclass((ReferenceType) (lv.locals[i]));
                    if (sup != null){
                        locals[i] = sup;
                    }
                    else{
                        // We should have checked this in Pass2!
                        throw new AssertionViolatedException(
                            "Could not load all the super classes of '"+locals[i]+"' and '"+lv.locals[i]+"'.");
                    }
                }
            }
            else{
                // Incompatible non-reference types degrade to UNKNOWN
                if (! (locals[i].equals(lv.locals[i])) ){
                    /*TODO
                    if ((locals[i] instanceof org.apache.commons.bcel6.generic.ReturnaddressType) &&
                        (lv.locals[i] instanceof org.apache.commons.bcel6.generic.ReturnaddressType)){
                        //System.err.println("merging "+locals[i]+" and "+lv.locals[i]);
                        throw new AssertionViolatedException("Merging different ReturnAddresses: '"+locals[i]+"' and '"+lv.locals[i]+"'.");
                    }
                    */
                    locals[i] = Type.UNKNOWN;
                }
            }
        } catch (ClassNotFoundException e) {
            // FIXME: maybe not the best way to handle this
            throw new AssertionViolatedException("Missing class: " + e, e);
        }
    }
    /**
     * Returns a String representation of this object, one "index: type" line per slot.
     */
    @Override
    public String toString(){
        StringBuilder sb = new StringBuilder();
        for (int i=0; i<locals.length; i++){
            sb.append(Integer.toString(i));
            sb.append(": ");
            sb.append(locals[i]);
            sb.append("\n");
        }
        return sb.toString();
    }
    /**
     * Replaces all occurences of u in this local variables set
     * with an "initialized" ObjectType. Matching is by identity (==), not equals().
     */
    public void initializeObject(UninitializedObjectType u){
        for (int i=0; i<locals.length; i++){
            if (locals[i] == u){
                locals[i] = u.getInitialized();
            }
        }
    }
}
| |
package com.gmail.irclark2000.objc;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author Isaac Clark Reformats code as needed after parsing
*
*/
public class CodeFormatter {
/**
 * Marker for identifying method needing reversed pair of arguments.
 */
public static final String REVERSE_ARGS_MARKER = "ReverseArgs";
// NOTE(review): the NAMES of these two templates appear swapped — SETTER
// expands to a get...() accessor and GETTER expands to a set...() mutator.
// makeSetGet() uses each constant consistently with its actual contents, so
// behavior is correct; renaming is deferred since the fields may also be used
// in parts of the class outside this view.
private static final String SETTER = "\n%s %s get%s() {\nreturn this.%s; \n}\n";
private static final String GETTER = "\n%s void set%s(%s %s) {\nthis.%s = %s;\n}\n";
// Method-name prefixes that mark an Objective-C method as a constructor (e.g. "init")
private ArrayList<String> constructorSignalsList;
// Delegate formatters, each responsible for one family of type conversions
private CodeFormatterString stringFormat = new CodeFormatterString();
private CodeFormatterArrayList arrayFormat = new CodeFormatterArrayList();
private CodeFormatterMap dictionaryFormat = new CodeFormatterMap();
private CodeFormatterUserDefined userDefinedFormat = new CodeFormatterUserDefined();
// Built-in Objective-C identifier -> Java identifier translations, used when
// external translation tables are not enabled
@SuppressWarnings("serial")
static final Map<String, String> SIMPLESTRINGS = new HashMap<String, String>() {
    {
        put("NSError", "Data");
        put("NSInteger", "Integer");
        put("NSUInteger", "Integer");
        put("YES", "true");
        put("TRUE", "true");
        put("NO", "false");
        put("FALSE", "false");
        put("NSObject", "Object");
        put("self", "this");
        put("nil", "null");
        put("NULL", "null");
        put("bool", "boolean");
        put("IBAction", "void");
        put("IBOutlet", "");
    }
};
// Method-call fragments that translate to a fixed replacement (often removal)
@SuppressWarnings("serial")
static final Map<String, String> SIMPLEFUNCTIONS = new HashMap<String, String>() {
    {
        put(".autorelease()", "");
        put(".retain()", "");
        put("NSNull.null()", "null");
    }
};
// Initializes the constructor-signature list with the standard "init" prefix
CodeFormatter() {
    constructorSignalsList = new ArrayList<String>();
    constructorSignalsList.add("init");
}
/**
 * Rewrites a single identifier to Java conventions by running it through the
 * translation tables and then each delegate formatter in a fixed order.
 *
 * @param id identifier to rewrite
 * @param options parsing options; selects external vs built-in translations
 * @return id after reformatting to Java conventions
 */
public String identifierFormatter(String id, ParseOptions options) {
    if (options.useExternalTranslations()) {
        // Global table first, then per-file overrides
        id = makeSimpleIDSubtitutions(
                Translations.getTranslation(Translations.GLOBALMAPKEY,
                        Translations.TranslationType.ID), id);
        id = makeSimpleIDSubtitutions(
                Translations.getTranslation(options.getInputFileName(),
                        Translations.TranslationType.ID), id);
    } else {
        id = makeSimpleIDSubtitutions(SIMPLESTRINGS, id);
    }
    // User-supplied identity pairs, then the type-specific delegates (order matters)
    id = makeSimpleIDSubtitutions(options.getIdentityPairs(), id);
    id = stringFormat.identifierFormatter(id, options);
    id = dictionaryFormat.identifierFormatter(id, options);
    id = arrayFormat.identifierFormatter(id, options);
    id = userDefinedFormat.identifierFormatter(id, options);
    return id;
}
/**
 * Looks the identifier up in the supplied translation table.
 *
 * @param map mapping of identifier to target (may be null)
 * @param ID identifier to be substituted for
 * @return the mapped identifier, or the original when there is no mapping
 */
public static String makeSimpleIDSubtitutions(Map<String, String> map,
        String ID) {
    if (map == null) {
        return ID;
    }
    String replacement = map.get(ID);
    return replacement == null ? ID : replacement;
}
/**
 * Applies fixed method-call substitutions to a piece of code. Only the FIRST
 * matching signature in the map's iteration order is applied.
 *
 * Replacement values beginning with '+' have special meaning: the '+' is
 * stripped and the call is rewritten so that the receiver expression becomes
 * the last argument of the replacement function (or the call is deleted
 * entirely when the replacement is just "+").
 *
 * @param map keys = signatures; values = replacements
 * @param code function call
 * @return rewritten as needed
 */
public static String makeSimpleMethodSubtitutions(Map<String, String> map,
        String code) {
    if (map == null) {
        return code;
    }
    for (String signature : map.keySet()) {
        if (code.contains(signature)) {
            String replaceCode = map.get(signature);
            if (replaceCode.startsWith("+")) {
                replaceCode = replaceCode.substring(1);
                if (replaceCode.length() == 0) {
                    // replacement was exactly "+": delete the whole call
                    code = "";
                } else {
                    // Re-order: receiver.method(args) -> replacement(args..., receiver)
                    int index = code.indexOf(signature)
                            + signature.length();
                    // index-1 points at the '(' of the original argument list
                    ArrayList<String> args = getFunctionArguments(code
                            .substring(index - 1));
                    String nCode = "";
                    for (int i = 0; i < args.size(); i++) {
                        String arg = args.get(i);
                        if (arg.length() == 0) {
                            continue;
                        }
                        if (nCode.length() == 0) {
                            nCode = replaceCode + arg;
                        } else {
                            nCode += ", " + arg;
                        }
                    }
                    // Append the original receiver (everything before the signature)
                    if (nCode.length() == 0) {
                        nCode = replaceCode
                                + code.substring(0, code.indexOf(signature));
                    } else {
                        nCode += ", "
                                + code.substring(0, code.indexOf(signature));
                    }
                    code = nCode + ")";
                }
            } else {
                // Simple textual replacement
                code = code.replace(signature, replaceCode);
            }
            break;
        }
    }
    return code;
}
/**
 * Builds getter/setter pairs for every synthesized property of the class.
 *
 * @param cd class description holder,
 * @param cDec class description for getters
 * @param className
 * @return getters and setters for properties
 */
public ArrayList<String> generateGetters(ClassDescription cd,
        ClassDescription.ClassDeclaration cDec, String className) {
    ArrayList<String> accessors = new ArrayList<String>();
    for (String synthesized : cDec.getSynthesized()) {
        String typeAndVisibility = getPropertyType(synthesized, className, cd, cDec);
        if (typeAndVisibility.length() == 0) {
            // property not declared anywhere; nothing to emit
            continue;
        }
        // getPropertyType() encodes its result as "type:visibility"
        String[] pieces = typeAndVisibility.split(":");
        accessors.add(makeSetGet(synthesized, pieces[0], pieces[1]));
    }
    return accessors;
}
/**
 * Produces the source text of a getter/setter pair for one property.
 * NOTE: the SETTER constant actually holds the getter template and GETTER the
 * setter template; they are used consistently with their contents here.
 */
private String makeSetGet(String vName, String type, String visibility) {
    if (visibility == null) {
        // this should never happen
        visibility = "";
    }
    String capitalized = vName.substring(0, 1).toUpperCase() + vName.substring(1);
    StringBuilder out = new StringBuilder();
    out.append(String.format(SETTER, visibility, type, capitalized, vName));
    out.append(String.format(GETTER, visibility, capitalized, type, vName, vName, vName));
    return out.toString();
}
/**
 * Finds the declared type and visibility of a property by searching four
 * declaration maps in priority order: the named class's private then public
 * declarations, followed by the anonymous ("") private then public ones.
 *
 * @return "type:visibility" on success, or "" when the property is not found
 */
private String getPropertyType(String vName, String className,
        ClassDescription cd, ClassDescription.ClassDeclaration cDecl) {
    String type = "";
    ClassDescription.ClassDeclaration cDec = null;
    String prefix = "";
    for (int i = 0; i < 4; i++) {
        // Select which declaration map to search and the visibility it implies
        switch (i) {
        case 0:
            cDec = ParserObjcListener.chooseMapAndDeclaration(cd,
                    className, false);
            prefix = "private";
            break;
        case 1:
            cDec = ParserObjcListener.chooseMapAndDeclaration(cd,
                    className, true);
            prefix = "public";
            break;
        case 2:
            cDec = ParserObjcListener
                    .chooseMapAndDeclaration(cd, "", false);
            prefix = "private";
            break;
        case 3:
            cDec = ParserObjcListener.chooseMapAndDeclaration(cd, "", true);
            prefix = "public";
            break;
        }
        if (cDec != null) {
            ArrayList<String> properties = cDec.getProperties();
            for (String property : properties) {
                // Last token is the variable name; everything before it is the type
                String[] parts = property.split("[ ]+");
                if (parts[parts.length - 1].equals(vName)) {
                    type = parts[0];
                    for (int j = 1; j < parts.length - 1; j++) {
                        type += " " + parts[j];
                    }
                    if (type.length() > 0) {
                        // append visibility to prefix
                        type = type + ":" + prefix;
                    }
                    return type;
                }
            }
        }
    }
    return type;
}
/**
 * Runs the statement through the regex translation tables: global rules first,
 * then rules specific to the current input file.
 *
 * @param statement
 * @param options
 * @return statements after applying regular expression translations
 */
public String applyRegexToStatement(String statement, ParseOptions options) {
    String fileName = options.getInputFileName();
    statement = applyRegex(Translations.getTranslation(
            Translations.GLOBALMAPKEY, Translations.TranslationType.REGEX),
            statement);
    return applyRegex(Translations.getTranslation(fileName,
            Translations.TranslationType.REGEX), statement);
}
// Applies at most ONE regex translation: the first pattern (in map iteration
// order) whose regex matches is used, then the loop stops.
// NOTE(review): Matcher.matches() requires the ENTIRE statement to match the
// regex, not just a substring — presumably intentional; confirm. Patterns are
// recompiled on every call; caching compiled Patterns would avoid rework.
private String applyRegex(Map<String, String> transMap, String statement) {
    String code = String.format("%s", statement);
    if (transMap != null) {
        for (String regex : transMap.keySet()) {
            Pattern pattern = Pattern.compile(regex);
            Matcher matcher = pattern.matcher(code);
            if (matcher.matches()) {
                code = replaceCodeWithPattern(matcher, code,
                        transMap.get(regex));
                break;
            }
        }
    }
    return code;
}
// NOTE(review): this currently ignores both the matcher and the original
// statement and returns the replacement pattern verbatim — capture-group
// substitution (e.g. $1 back-references) looks intended but is not
// implemented; confirm before changing behavior.
private String replaceCodeWithPattern(Matcher matcher, String statement,
        String pattern) {
    String code = pattern;
    return code;
}
/**
 * Note that for constructor methods starting with init a flag is set to
 * true allowing some additional treatment if smartConstructor is turned on
 *
 * @param code
 *            the construction declaration, expected as "ReturnType name(...)"
 * @param className
 * @param options
 *            parsing options
 * @return converts method definition into a constructor definition FIXME
 *         does not use signatures yet
 */
public String generateConstructor(String code, String className,
        ParseOptions options) {
    String proto = String.format("%s", code);
    options.setConstructorMethod(false);
    // parts[0] = declared return type, parts[1] = method name (+ params)
    String[] parts = proto.split(" ");
    for (String signature : options.getConstructorSignatures()) {
        if (parts[1].startsWith(signature)) {
            String cName = parts[0];
            // "Object" means the return type was id/instancetype; use the real class name
            if (cName.equals("Object")) {
                cName = className;
            }
            if (parts[1].equals("init")) {
                // bare init -> no-arg constructor
                proto = cName + "()";
                options.setConstructorMethod(true);
            } else {
                int index = signature.length();
                if (signature.startsWith("init")) {
                    options.setConstructorMethod(true);
                }
                if (!proto.contains("(")) {
                    proto = cName + "()";
                } else {
                    // keep the parameter list, replace everything before the '('
                    while (proto.charAt(index) != '(')
                        index++;
                    proto = cName + proto.substring(index);
                }
            }
            break;
        }
    }
    return proto;
}
    /**
     * Rewrites an Objective-C style constructor invocation (alloc/init family)
     * into a Java constructor call. Array, dictionary and user-defined
     * constructor forms are reformatted first; then, when the call contains a
     * configured constructor signature, the signature text is cut out and the
     * call is prefixed with {@code new} (or spliced onto {@code super} for
     * superclass construction).
     *
     * NOTE(review): the substring arithmetic below uses {@code call.length()}
     * as the end index into {@code proto}, which assumes the formatters above
     * did not change the text length — TODO confirm.
     *
     * @param call
     *            the constructor call text to convert
     * @param options
     *            parsing options supplying the constructor signatures
     * @return the reformatted constructor call
     */
    private String reformatConstructorCall(String call, ParseOptions options) {
        // String proto = String.format("%s", call);
        String proto = arrayFormat.reformatConstructorCall(call, options);
        proto = dictionaryFormat.reformatConstructorCall(proto, options);
        proto = userDefinedFormat.reformatConstructorCall(proto, options);
        ArrayList<String> signatures = new ArrayList<String>();
        signatures.addAll(options.getConstructorSignatures());
        if (signatures.contains("init")) {
            // Try the longer alloc().init form before plain init.
            signatures.add(0, "alloc().init");
        }
        for (String signature : signatures) {
            if (proto.contains(signature)) {
                // We must be more sophisticated about init
                if (signature.equals("init")) {
                    int indx = proto.indexOf("init") + 4;
                    if (indx < proto.length()) {
                        char c = proto.charAt(indx);
                        if (c != '(' && Character.isLowerCase(c)) {
                            // !!not a constructor
                            continue;
                        }
                    }
                }
                // Cut the signature out, up to its opening parenthesis.
                int start = proto.indexOf(signature) - 1;
                int end = start + signature.length() - 1;
                while (proto.charAt(end) != '(')
                    end++;
                if (call.startsWith("super")) {
                    proto = proto.substring(0, start)
                            + proto.substring(end, call.length());
                } else {
                    proto = "new " + proto.substring(0, start)
                            + proto.substring(end, call.length());
                }
                break;
            }
        }
        return proto;
    }
/**
* @param call
* the method call to convert
* @param options
* for converting
* @return reformatted method call
*/
public String reformatMethodCall(String call, ParseOptions options) {
// String proto = String.format("%s", call);
String proto = reformatConstructorCall(call, options);
proto = stringFormat.reformatStringFunctions(proto, options);
proto = arrayFormat.reformatArrayListFunctions(proto, options);
proto = dictionaryFormat.reformatMapFunctions(proto, options);
proto = userDefinedFormat.reformatMethodCall(proto, options);
if (!options.useExternalTranslations()) {
proto = makeSimpleMethodSubtitutions(SIMPLEFUNCTIONS, proto);
} else {
proto = makeSimpleMethodSubtitutions(Translations.getTranslation(
Translations.GLOBALMAPKEY,
Translations.TranslationType.FUNCTION), proto);
proto = makeSimpleMethodSubtitutions(Translations.getTranslation(
options.getInputFileName(),
Translations.TranslationType.FUNCTION), proto);
}
proto = fixReverseArgs(proto);
if (proto.contains("isKindOf(")) {
proto = isKindOf(proto);
}
return proto;
}
private String isKindOf(String proto) {
// proto = proto.replace("isKindOf(", "getClass().isInstance(");
// proto = proto.replace(".class", "");
proto = proto.replace("isKindOf(", "instanceof ");
proto = proto.replace(".class)", "");
return proto;
}
    // Rewrites a call tagged with REVERSE_ARGS_MARKER so that its first two
    // arguments are swapped, then strips the marker; untagged calls are
    // returned unchanged.
    // NOTE(review): assumes a tagged call has at least two arguments and that
    // everything after the second argument can be dropped — TODO confirm.
    String fixReverseArgs(String fCall) {
        String call = String.format("%s", fCall);
        int start = call.indexOf(REVERSE_ARGS_MARKER);
        if (start != -1) {
            // The argument list begins right after the marker text.
            int startArgs = start + REVERSE_ARGS_MARKER.length();
            ArrayList<String> args = getFunctionArguments(fCall
                    .substring(startArgs));
            call = call.substring(0, startArgs + 1) + args.get(1) + ", "
                    + args.get(0) + ")";
            call = call.replace(REVERSE_ARGS_MARKER, "");
        }
        return call;
    }
// make sure things like static, final, public, private are in correct order
String fixDeclarations(String decl) {
String dec = "";
String[] decParts = decl.split("[ ]+");
List<String> parts = Arrays.asList(decParts);
if (parts.contains("public")) {
dec = "public";
} else if (parts.contains("private")) {
dec = "public";
}
if (parts.contains("static")) {
if (dec.length() == 0) {
dec = "static";
} else {
dec += " static";
}
}
if (parts.contains("final")) {
if (dec.length() == 0) {
dec = "final";
} else {
dec += " final";
}
}
for (String part : parts) {
if (part.equals("public") || part.equals("private")
|| part.equals("static") || part.equals("final")) {
continue;
}
if (dec.length() == 0) {
dec = part;
} else {
dec = dec += " " + part;
}
}
return dec;
}
String tabsForLevel(int level) {
String tabs = "";
for (int i = 0; i < level; i++) {
tabs += "\t";
}
return tabs;
}
    // Re-indents generated code by tracking brace depth: each '\n' is
    // followed by one tab per open brace, and a closing '}' eats one pending
    // tab so it lines up with its opening construct.
    // NOTE(review): insideQuote/insideSingleQuote are never set to true
    // anywhere in this method, so the quote-tracking branch appears
    // unreachable and braces inside string literals still change the level —
    // TODO confirm whether opening quotes should be detected here.
    String codeIndenter(String code) {
        int level = 1; // current brace nesting depth
        boolean insideQuote = false; // inside a "..." literal
        boolean insideSingleQuote = false; // inside a '...' literal
        boolean escape = false; // previous char was a backslash
        boolean addTabs = false; // emit indentation after this char
        StringBuffer rewrite = new StringBuffer("");
        for (int i = 0; i < code.length(); i++) {
            char c = code.charAt(i);
            addTabs = false;
            if (insideQuote || insideSingleQuote) {
                // Track backslash escapes so \" and \' do not end a literal.
                if (!escape && c == '\\') {
                    escape = true;
                } else if (escape) {
                    escape = false;
                }
                if (insideSingleQuote) {
                    if (c == '\'' && !escape) {
                        insideSingleQuote = false;
                    }
                }
                if (insideQuote) {
                    if (c == '\"' && !escape) {
                        insideQuote = false;
                    }
                }
            }
            if (!insideQuote && !insideSingleQuote) {
                if (c == '\n') {
                    addTabs = true;
                } else if (c == '}') {
                    // appendAgain = true;
                    level--;
                } else if (c == '{') {
                    level++;
                }
            }
            if (addTabs) {
                // Newline: append indentation for the current nesting level.
                rewrite.append(c + tabsForLevel(level));
                addTabs = false;
            } else if (c == '}') {
                // remove 1 tab
                if (rewrite.charAt(rewrite.length() - 1) == '\t') {
                    rewrite.setCharAt(rewrite.length() - 1, c);
                } else {
                    rewrite.append(c);
                }
            } else {
                rewrite.append(c);
            }
        }
        // try removing some semicolon only lines
        String finalCode = rewrite.toString().replaceAll("\n[\t]+;\n", "\n");
        return finalCode;
    }
    /**
     * Splits the parenthesised argument list of a function call into its
     * top-level arguments, ignoring commas nested inside further parentheses
     * or inside string/char literals.
     *
     * @param call
     *            text containing a call such as {@code f(a, g(b, c), "d,e")}
     * @return array of args from function call
     */
    public static ArrayList<String> getFunctionArguments(String call) {
        return getEnclosedArguments(call, '(');
    }
    /**
     * Splits the text enclosed by the first {@code openBrace} into top-level
     * arguments separated by commas. Commas nested inside deeper braces or
     * inside string/char literals are ignored; the scan stops at the brace
     * that closes the opening one.
     *
     * NOTE(review): assumes the input contains a balanced brace pair;
     * otherwise charAt() runs past the end — TODO confirm callers guarantee
     * this.
     *
     * @param call
     *            text containing the braced argument list
     * @param openBrace
     *            opening brace character: '(' or '{'
     * @return the trimmed top-level arguments, in order
     */
    private static ArrayList<String> getEnclosedArguments(String call,
            char openBrace) {
        char closeBrace = ')';
        if (openBrace == '{')
            closeBrace = '}';
        ArrayList<String> args = new ArrayList<String>();
        boolean insideQuote = false; // inside a "..." literal
        boolean insideSingleQuote = false; // inside a '...' literal
        int parenCount = 0; // nesting depth below the outer brace pair
        // move to starting paren
        int start = 0;
        String arg;
        while (call.charAt(start) != openBrace)
            start++;
        int end = start + 1;
        while (true) {
            char cPrior; // the previous character
            char c = call.charAt(end);
            cPrior = (end == 0) ? ' ' : call.charAt(end - 1);
            if (!insideQuote && !insideSingleQuote) {
                if (c == ',') {
                    if (parenCount == 0) {
                        // Top-level comma: close off the current argument.
                        arg = call.substring(start + 1, end);
                        args.add(arg.trim());
                        start = end;
                    }
                } else if (c == openBrace) {
                    parenCount++;
                } else if (c == '\'' && cPrior != '\\') {
                    insideSingleQuote = !insideSingleQuote;
                } else if (c == '\"' && cPrior != '\\') {
                    insideQuote = !insideQuote;
                } else if (c == closeBrace) {
                    parenCount--;
                    if (parenCount < 0) {
                        // Matched the outer closing brace: last argument.
                        arg = call.substring(start + 1, end);
                        args.add(arg.trim());
                        break;
                    }
                }
                end++;
            } else if (insideQuote) {
                if (c == '\"' && cPrior != '\\') {
                    insideQuote = !insideQuote;
                }
                end++;
            } else if (insideSingleQuote) {
                if (c == '\'' && cPrior != '\\') {
                    insideSingleQuote = !insideSingleQuote;
                }
                end++;
            } else {
                // Unreachable: the three cases above are exhaustive.
                end++;
            }
        }
        return args;
    }
    /**
     * methDef must be pre-screen to be from an init-based constructor. Removes
     * return values, and if(self) stuff that came from IOS.
     *
     * @param methDef
     *            code to convert
     * @return code with IOS specific stuff removed
     */
    public String applyConstructorFixes(String methDef) {
        String code = String.format("%s", methDef);
        // Java constructors may not assign to or return "this".
        code = code.replace("this = super(", "super(");
        code = code.replace("return this;", "");
        code = code.replace("this = new this(", "this(");
        if (code.contains("if(this != null){")) {
            // Unwrap the translated "if (self) { ... }" guard: keep only its
            // body, and only when the braces enclose a single content block.
            String anIf = "if(this != null){";
            int index = code.indexOf(anIf) + anIf.length() - 1;
            ArrayList<String> contents = getEnclosedArguments(
                    code.substring(index), '{');
            if (contents.size() == 1) {
                // Locate the guard's closing brace.
                int start = index + contents.get(0).length();
                while (code.charAt(start) != '}') {
                    start++;
                }
                code = code.substring(0, code.indexOf(anIf))
                        + contents.get(0).trim() + code.substring(start + 1);
                // Collapse blank lines left behind by the removal.
                code = code.replaceAll("\n[\n]+", "\n");
            }
        }
        return code;
    }
    /**
     * Builds an assignment statement from a left-hand side, an operator and a
     * right-hand side, cleaning up Objective-C artifacts: a pointer star on
     * the left is dropped, and an assignment to a getter call
     * ({@code x.getFoo() = v}) is rewritten as the matching setter
     * ({@code x.setFoo(v)}).
     *
     * @param conditional
     *            left side of assignment
     * @param opCode
     *            operation (e.g. "=", "+="); may be null, in which case the
     *            left side is returned untouched
     * @param assignExpression
     *            rightSide of assignment
     * @return assigment statement
     */
    public String assignment_expression(String conditional, String opCode,
            String assignExpression) {
        String statement = conditional;
        boolean useEquals = false;
        if (opCode != null) {
            // make sure left side does not contain multiplication
            String[] parts = statement.split("\\*");
            // found multiplication left of equal sign?
            if (parts.length == 2 && opCode.endsWith("=")) {
                // get rid of "*" as it is likely pointer representation
                statement = parts[0].trim() + " " + parts[1].trim() + " = "
                        + assignExpression;
            }
            // check for a getter is present where a setter is required
            else if (opCode.equals("=") && statement.endsWith("()")) {
                useEquals = true;
                // double check for a getter
                String[] dotParts = statement.split("\\.");
                // double check for a getter
                if (dotParts.length > 1
                        && dotParts[dotParts.length - 1].startsWith("get")) {
                    // convert to setter!!
                    useEquals = false;
                    // Rebuild the receiver chain without the getter call.
                    for (int i = 0; i < dotParts.length - 1; i++) {
                        if (i == 0) {
                            statement = dotParts[0].trim();
                        } else {
                            statement += "." + dotParts[i].trim();
                        }
                    }
                    String ending = dotParts[dotParts.length - 1];
                    // change statement to setter: substring strips the
                    // leading 'g' and trailing "()" of "getFoo()", yielding
                    // ".setFoo(<rhs>)".
                    statement += ".s"
                            + ending.substring(1, ending.length() - 2) + "("
                            + assignExpression + ")";
                } else { // not a getter use normal code probably wrong!!
                    useEquals = true;
                    statement += " " + opCode + " " + assignExpression;
                }
            } else { // normal assignment
                if (opCode.equals("=")) {
                    useEquals = true;
                }
                statement += " " + opCode + " " + assignExpression;
            }
        }
        if (useEquals) {
            statement = reformatAssignmentStatements(statement);
        }
        return statement;
    }
    /**
     * Reformats an assignment statement after translation. The only special
     * case handled so far is {@code SharedPreferences}: a companion
     * {@code Editor} declaration is appended so translated code can write the
     * preferences it just obtained.
     *
     * @param statement
     *            an assignment statement
     * @return reformatted assignment statement
     */
    public String reformatAssignmentStatements(String statement) {
        if (statement.contains("SharedPreferences")) {
            // The variable name is the last word on the left-hand side.
            String[] parts = statement.split("=", 2);
            String[] sub1 = parts[0].trim().split(" ");
            String var = sub1[sub1.length - 1];
            statement += ";\nEditor edit = " + var + ".edit()";
        }
        return statement;
    }
    /**
     * Handles preprocessor directives. Directives are currently discarded:
     * the translation emits no code for them.
     *
     * @param directive
     *            the preprocessor directive text
     * @param expression
     *            the directive's expression text
     * @return always the empty string
     */
    @SuppressWarnings("javadoc")
    public String preProcessorInstructions(String directive, String expression) {
        return "";
    }
/**
* @param text
* @param code
* @return code for define statement
*/
public String convertDefineToAssignment(String text, String code) {
String stmt = text + " = " + code + ";";
if (!code.isEmpty()) {
if (code.charAt(0) == '\"') {
stmt = "String " + stmt;
} else {
stmt = "Number " + stmt;
}
}
return stmt;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.builtin;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import org.joda.time.DateTime;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.pig.Expression;
import org.apache.pig.FileInputLoadFunc;
import org.apache.pig.LoadCaster;
import org.apache.pig.LoadFunc;
import org.apache.pig.LoadMetadata;
import org.apache.pig.PigException;
import org.apache.pig.ResourceSchema;
import org.apache.pig.ResourceSchema.ResourceFieldSchema;
import org.apache.pig.ResourceStatistics;
import org.apache.pig.StoreFunc;
import org.apache.pig.StoreFuncInterface;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
import org.apache.pig.backend.hadoop.datastorage.HDataStorage;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataReaderWriter;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.io.BinStorageInputFormat;
import org.apache.pig.impl.io.BinStorageOutputFormat;
import org.apache.pig.impl.io.BinStorageRecordReader;
import org.apache.pig.impl.io.BinStorageRecordWriter;
import org.apache.pig.impl.io.FileLocalizer;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.util.Utils;
/**
* Load and store data in a binary format. This class is used by Pig to move
* data between MapReduce jobs. Use of this function for storing user data is
* supported.
*/
public class BinStorage extends FileInputLoadFunc
implements StoreFuncInterface, LoadMetadata {
static class UnImplementedLoadCaster implements LoadCaster {
private static final String unImplementedErrorMessage = "Cannot cast bytes loaded from BinStorage. Please provide a custom converter.";
@Override
public DataBag bytesToBag(byte[] b, ResourceFieldSchema fieldSchema)
throws IOException {
throw new ExecException(unImplementedErrorMessage, 1118);
}
@Override
public String bytesToCharArray(byte[] b) throws IOException {
throw new ExecException(unImplementedErrorMessage, 1118);
}
@Override
public Double bytesToDouble(byte[] b) throws IOException {
throw new ExecException(unImplementedErrorMessage, 1118);
}
@Override
public Float bytesToFloat(byte[] b) throws IOException {
throw new ExecException(unImplementedErrorMessage, 1118);
}
@Override
public Integer bytesToInteger(byte[] b) throws IOException {
throw new ExecException(unImplementedErrorMessage, 1118);
}
@Override
public Long bytesToLong(byte[] b) throws IOException {
throw new ExecException(unImplementedErrorMessage, 1118);
}
@Override
public Boolean bytesToBoolean(byte[] b) throws IOException {
throw new ExecException(unImplementedErrorMessage, 1118);
}
@Override
public DateTime bytesToDateTime(byte[] b) throws IOException {
throw new ExecException(unImplementedErrorMessage, 1118);
}
@Override
public Map<String, Object> bytesToMap(byte[] b, ResourceFieldSchema fieldSchema) throws IOException {
throw new ExecException(unImplementedErrorMessage, 1118);
}
@Override
public Tuple bytesToTuple(byte[] b, ResourceFieldSchema fieldSchema)
throws IOException {
throw new ExecException(unImplementedErrorMessage, 1118);
}
@Override
public BigInteger bytesToBigInteger(byte[] b) throws IOException {
throw new ExecException(unImplementedErrorMessage, 1118);
}
@Override
public BigDecimal bytesToBigDecimal(byte[] b) throws IOException {
throw new ExecException(unImplementedErrorMessage, 1118);
}
}
Iterator<Tuple> i = null;
private static final Log mLog = LogFactory.getLog(BinStorage.class);
protected long end = Long.MAX_VALUE;
static String casterString = null;
static LoadCaster caster = null;
private BinStorageRecordReader recReader = null;
private BinStorageRecordWriter recWriter = null;
public BinStorage() {
}
// If user knows how to cast the bytes for BinStorage, provide
// the class name for the caster. When we later want to convert
// bytes to other types, BinStorage knows how. This provides a way
// for user to store intermediate data without having to explicitly
// list all the fields and figure out their parts.
public BinStorage(String casterString) {
this.casterString = casterString;
}
@Override
public Tuple getNext() throws IOException {
if(recReader.nextKeyValue()) {
return recReader.getCurrentValue();
} else {
return null;
}
}
@Override
public void putNext(Tuple t) throws IOException {
try {
recWriter.write(null, t);
} catch (InterruptedException e) {
throw new IOException(e);
}
}
public byte[] toBytes(DataBag bag) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
try {
DataReaderWriter.writeDatum(dos, bag);
} catch (Exception ee) {
int errCode = 2105;
String msg = "Error while converting bag to bytes.";
throw new ExecException(msg, errCode, PigException.BUG, ee);
}
return baos.toByteArray();
}
public byte[] toBytes(String s) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
try {
DataReaderWriter.writeDatum(dos, s);
} catch (Exception ee) {
int errCode = 2105;
String msg = "Error while converting chararray to bytes.";
throw new ExecException(msg, errCode, PigException.BUG, ee);
}
return baos.toByteArray();
}
public byte[] toBytes(Double d) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
try {
DataReaderWriter.writeDatum(dos, d);
} catch (Exception ee) {
int errCode = 2105;
String msg = "Error while converting double to bytes.";
throw new ExecException(msg, errCode, PigException.BUG, ee);
}
return baos.toByteArray();
}
public byte[] toBytes(Float f) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
try {
DataReaderWriter.writeDatum(dos, f);
} catch (Exception ee) {
int errCode = 2105;
String msg = "Error while converting float to bytes.";
throw new ExecException(msg, errCode, PigException.BUG, ee);
}
return baos.toByteArray();
}
public byte[] toBytes(Integer i) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
try {
DataReaderWriter.writeDatum(dos, i);
} catch (Exception ee) {
int errCode = 2105;
String msg = "Error while converting int to bytes.";
throw new ExecException(msg, errCode, PigException.BUG, ee);
}
return baos.toByteArray();
}
public byte[] toBytes(Long l) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
try {
DataReaderWriter.writeDatum(dos, l);
} catch (Exception ee) {
int errCode = 2105;
String msg = "Error while converting long to bytes.";
throw new ExecException(msg, errCode, PigException.BUG, ee);
}
return baos.toByteArray();
}
public byte[] toBytes(Boolean b) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
try {
DataReaderWriter.writeDatum(dos, b);
} catch (Exception ee) {
int errCode = 2105;
String msg = "Error while converting boolean to bytes.";
throw new ExecException(msg, errCode, PigException.BUG, ee);
}
return baos.toByteArray();
}
public byte[] toBytes(DateTime dt) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
try {
DataReaderWriter.writeDatum(dos, dt);
} catch (Exception ee) {
int errCode = 2105;
String msg = "Error while converting datetime to bytes.";
throw new ExecException(msg, errCode, PigException.BUG, ee);
}
return baos.toByteArray();
}
public byte[] toBytes(Map<String, Object> m) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
try {
DataReaderWriter.writeDatum(dos, m);
} catch (Exception ee) {
int errCode = 2105;
String msg = "Error while converting map to bytes.";
throw new ExecException(msg, errCode, PigException.BUG, ee);
}
return baos.toByteArray();
}
public byte[] toBytes(Tuple t) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
try {
DataReaderWriter.writeDatum(dos, t);
} catch (Exception ee) {
int errCode = 2105;
String msg = "Error while converting tuple to bytes.";
throw new ExecException(msg, errCode, PigException.BUG, ee);
}
return baos.toByteArray();
}
@Override
public InputFormat getInputFormat() {
return new BinStorageInputFormat();
}
@Override
public int hashCode() {
return 42;
}
@SuppressWarnings("unchecked")
@Override
public LoadCaster getLoadCaster() throws IOException {
if (caster == null) {
Class<LoadCaster> casterClass = null;
if (casterString!=null) {
ClassLoader cl = Thread.currentThread().getContextClassLoader();
try {
// Try casterString as a fully qualified name
casterClass = (Class<LoadCaster>)cl.loadClass(casterString);
} catch (ClassNotFoundException e) {
}
if (casterClass==null) {
try {
// Try casterString as in builtin
casterClass = (Class<LoadCaster>)cl.loadClass("org.apache.pig.builtin." + casterString);
} catch (ClassNotFoundException e) {
throw new FrontendException("Cannot find LoadCaster class " + casterString, 1119, e);
}
}
try {
caster = casterClass.newInstance();
} catch (Exception e) {
throw new FrontendException("Cannot instantiate class " + casterString, 2259, e);
}
}
else {
caster = new UnImplementedLoadCaster();
}
}
return caster;
}
@Override
public void prepareToRead(RecordReader reader, PigSplit split) {
recReader = (BinStorageRecordReader)reader;
}
@Override
public void setLocation(String location, Job job) throws IOException {
FileInputFormat.setInputPaths(job, location);
}
@Override
public OutputFormat getOutputFormat() {
return new BinStorageOutputFormat();
}
@Override
public void prepareToWrite(RecordWriter writer) {
this.recWriter = (BinStorageRecordWriter) writer;
}
@Override
public void setStoreLocation(String location, Job job) throws IOException {
FileOutputFormat.setOutputPath(job, new Path(location));
}
@Override
public void checkSchema(ResourceSchema s) throws IOException {
}
@Override
public String relToAbsPathForStoreLocation(String location, Path curDir)
throws IOException {
return LoadFunc.getAbsolutePath(location, curDir);
}
@Override
public String[] getPartitionKeys(String location, Job job)
throws IOException {
return null;
}
@SuppressWarnings("deprecation")
@Override
public ResourceSchema getSchema(String location, Job job)
throws IOException {
Configuration conf = job.getConfiguration();
Properties props = ConfigurationUtil.toProperties(conf);
// At compile time in batch mode, the file may not exist
// (such as intermediate file). Just return null - the
// same way as we would if we did not get a valid record
String[] locations = getPathStrings(location);
for (String loc : locations) {
// since local mode now is implemented as hadoop's local mode
// we can treat either local or hadoop mode as hadoop mode - hence
// we can use HDataStorage and FileLocalizer.openDFSFile below
HDataStorage storage;
try {
storage = new HDataStorage((new org.apache.hadoop.fs.Path(loc)).toUri(), props);
} catch (RuntimeException e) {
throw new IOException(e);
}
if (!FileLocalizer.fileExists(loc, storage)) {
return null;
}
}
return Utils.getSchema(this, location, false, job);
}
@Override
public ResourceStatistics getStatistics(String location, Job job)
throws IOException {
return null;
}
@Override
public void setPartitionFilter(Expression plan) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public void setStoreFuncUDFContextSignature(String signature) {
}
@Override
public void cleanupOnFailure(String location, Job job) throws IOException {
StoreFunc.cleanupOnFailureImpl(location, job);
}
@Override
public void cleanupOnSuccess(String location, Job job) throws IOException {
// DEFAULT: do nothing
}
}
| |
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeTrue;
import com.facebook.buck.file.ProjectFilesystemMatchers;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.rules.BuildRuleSuccessType;
import com.facebook.buck.testutil.integration.BuckBuildLog;
import com.facebook.buck.testutil.integration.DebuggableTemporaryFolder;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.facebook.buck.util.environment.Platform;
import com.google.common.base.Optional;
import org.hamcrest.CustomTypeSafeMatcher;
import org.hamcrest.Matcher;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import java.io.IOException;
/**
 * Integration tests for precompiled headers (PCH): that they build, produce
 * dep files, and trigger (or avoid) recompilation as their inputs change.
 */
public class PrecompiledHeaderIntegrationTest {

  private ProjectWorkspace workspace;

  @Rule
  public DebuggableTemporaryFolder tmp = new DebuggableTemporaryFolder();

  @Before
  public void setUp() throws IOException {
    workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "precompiled_headers", tmp);
    workspace.setUp();
  }

  @Test
  public void compilesWithPrecompiledHeaders() throws IOException {
    assumeTrue(Platform.detect() == Platform.MACOS);
    workspace.runBuckBuild("//:some_library#default,static").assertSuccess();
    // Fails the test if the build generated no pch-flavored target.
    findPchTarget();
  }

  @Test
  public void pchDepFileHasReferencedHeaders() throws IOException {
    assumeTrue(Platform.detect() == Platform.MACOS);
    workspace.runBuckBuild("//:some_library#default,static").assertSuccess();
    BuildTarget target = findPchTarget();
    String depFileContents = workspace.getFileContents(
        "buck-out/gen/" + target.getShortNameAndFlavorPostfix() + ".gch.dep");
    assertThat(depFileContents, containsString("referenced_by_prefix_header.h"));
  }

  @Test
  public void changingPrefixHeaderCausesRecompile() throws Exception {
    assumeTrue(Platform.detect() == Platform.MACOS);
    workspace.runBuckBuild("//:some_binary#default").assertSuccess();
    workspace.resetBuildLogFile();
    workspace.writeContentsToPath(
        "#pragma once\n" +
        "#include <stdio.h>\n" +
        "#include \"referenced_by_prefix_header.h\"\n" +
        "#include <referenced_by_prefix_header_from_dependency.h>\n" +
        "#define FOO 100\n",
        "prefix_header.h");
    workspace.runBuckBuild("//:some_binary#default").assertSuccess();
    assertPchAndLibraryBuiltLocally();
  }

  @Test
  public void changingPchReferencedHeaderFromSameTargetCausesLibraryToRecompile() throws Exception {
    assumeTrue(Platform.detect() == Platform.MACOS);
    workspace.runBuckBuild("//:some_binary#default").assertSuccess();
    workspace.resetBuildLogFile();
    workspace.writeContentsToPath(
        "#pragma once\n#define REFERENCED_BY_PREFIX_HEADER 3\n",
        "referenced_by_prefix_header.h");
    workspace.runBuckBuild("//:some_binary#default").assertSuccess();
    assertPchAndLibraryBuiltLocally();
  }

  @Test
  public void changingPchReferencedHeaderFromDependencyCausesLibraryToRecompile() throws Exception {
    assumeTrue(Platform.detect() == Platform.MACOS);
    workspace.runBuckBuild("//:some_binary#default").assertSuccess();
    workspace.resetBuildLogFile();
    workspace.writeContentsToPath(
        "#pragma once\n#define REFERENCED_BY_PREFIX_HEADER_FROM_DEPENDENCY 3\n",
        "referenced_by_prefix_header_from_dependency.h");
    workspace.runBuckBuild("//:some_binary#default").assertSuccess();
    assertPchAndLibraryBuiltLocally();
  }

  @Test
  public void touchingPchReferencedHeaderShouldNotCauseClangToRejectPch() throws Exception {
    assumeTrue(Platform.detect() == Platform.MACOS);
    workspace.runBuckBuild("//:some_binary#default").assertSuccess();
    workspace.resetBuildLogFile();
    // Change this file (not in the pch) to trigger recompile.
    workspace.writeContentsToPath(
        "int lib_func() { return 0; }",
        "lib.c");
    // Touch this file that contributes to the PCH without changing its contents.
    workspace.writeContentsToPath(
        workspace.getFileContents("referenced_by_prefix_header_from_dependency.h"),
        "referenced_by_prefix_header_from_dependency.h");
    workspace.runBuckBuild("//:some_binary#default").assertSuccess();
    BuckBuildLog buildLog = workspace.getBuildLog();
    assertThat(
        "PCH should not change as no pch input file contents has changed.",
        buildLog,
        reportedTargetSuccessType(findPchTarget(), BuildRuleSuccessType.MATCHING_RULE_KEY));
    assertThat(
        buildLog,
        reportedTargetSuccessType(
            workspace.newBuildTarget("//:some_library#default,static"),
            BuildRuleSuccessType.BUILT_LOCALLY));
  }

  @Test
  public void changingCodeUsingPchWhenPchIsCachedButNotBuiltShouldBuildPch() throws Exception {
    assumeTrue(Platform.detect() == Platform.MACOS);
    workspace.enableDirCache();
    workspace.runBuckBuild("//:some_binary#default").assertSuccess();
    workspace.runBuckCommand("clean");
    workspace.writeContentsToPath(
        "int lib_func() { return 0; }",
        "lib.c");
    workspace.runBuckBuild("//:some_binary#default").assertSuccess();
    // The PCH must be rebuilt locally (not merely rule-key matched) ...
    assertPchAndLibraryBuiltLocally();
    // ... and its output file must actually exist on disk.
    assertThat(
        workspace.asCell().getFilesystem(),
        ProjectFilesystemMatchers.pathExists(
            workspace.getPath(
                "buck-out/gen/" + findPchTarget().getShortNameAndFlavorPostfix() + ".gch")));
  }

  /** Finds the build target carrying a {@code pch-*} flavor, failing if none exists. */
  private BuildTarget findPchTarget() throws IOException {
    for (BuildTarget target : workspace.getBuildLog().getAllTargets()) {
      for (Flavor flavor : target.getFlavors()) {
        if (flavor.getName().startsWith("pch-")) {
          return target;
        }
      }
    }
    fail("should have generated a pch target");
    return null;
  }

  /**
   * Asserts that the most recent build rebuilt both the PCH target and the
   * library that consumes it from local sources.
   */
  private void assertPchAndLibraryBuiltLocally() throws IOException {
    BuckBuildLog buildLog = workspace.getBuildLog();
    assertThat(
        buildLog,
        reportedTargetSuccessType(findPchTarget(), BuildRuleSuccessType.BUILT_LOCALLY));
    assertThat(
        buildLog,
        reportedTargetSuccessType(
            workspace.newBuildTarget("//:some_library#default,static"),
            BuildRuleSuccessType.BUILT_LOCALLY));
  }

  /** Matcher asserting that a target finished with the given success type. */
  private static Matcher<BuckBuildLog> reportedTargetSuccessType(
      final BuildTarget target,
      final BuildRuleSuccessType successType) {
    return new CustomTypeSafeMatcher<BuckBuildLog>(
        "target: " + target.toString() + " with result: " + successType) {
      @Override
      protected boolean matchesSafely(BuckBuildLog buckBuildLog) {
        return buckBuildLog.getLogEntry(target).getSuccessType().equals(Optional.of(successType));
      }
    };
  }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2010, Sun Microsystems, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.cli;
import edu.umd.cs.findbugs.annotations.CheckForNull;
import edu.umd.cs.findbugs.annotations.NonNull;
import hudson.AbortException;
import hudson.Extension;
import hudson.ExtensionList;
import hudson.ExtensionPoint;
import hudson.ExtensionPoint.LegacyInstancesAreScopedToHudson;
import hudson.Functions;
import hudson.cli.declarative.CLIMethod;
import hudson.cli.declarative.OptionHandlerExtension;
import hudson.remoting.Channel;
import hudson.security.SecurityRealm;
import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Type;
import java.nio.charset.Charset;
import java.util.List;
import java.util.Locale;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;
import jenkins.model.Jenkins;
import org.apache.commons.discovery.ResourceClassIterator;
import org.apache.commons.discovery.ResourceNameIterator;
import org.apache.commons.discovery.resource.ClassLoaders;
import org.apache.commons.discovery.resource.classes.DiscoverClasses;
import org.apache.commons.discovery.resource.names.DiscoverServiceNames;
import org.jvnet.hudson.annotation_indexer.Index;
import org.jvnet.tiger_types.Types;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import org.kohsuke.args4j.CmdLineException;
import org.kohsuke.args4j.CmdLineParser;
import org.kohsuke.args4j.spi.OptionHandler;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContext;
import org.springframework.security.core.context.SecurityContextHolder;
/**
* Base class for Hudson CLI.
*
* <h2>How does a CLI command work</h2>
* <p>
* The users starts {@linkplain CLI the "CLI agent"} on a remote system, by specifying arguments, like
* {@code "java -jar jenkins-cli.jar command arg1 arg2 arg3"}. The CLI agent creates
* a connection to the server, and it sends the entire arguments to the server, along with
* the remoted stdin/out/err.
*
* <p>
* The Hudson master then picks the right {@link CLICommand} to execute, clone it, and
* calls {@link #main(List, Locale, InputStream, PrintStream, PrintStream)} method.
*
* <h2>Note for CLI command implementor</h2>
* Start with <a href="https://www.jenkins.io/doc/developer/cli/writing-cli-commands/">this document</a>
* to get the general idea of CLI.
*
* <ul>
* <li>
* Put {@link Extension} on your implementation to have it discovered by Hudson.
*
* <li>
* Use <a href="https://github.com/kohsuke/args4j">args4j</a> annotation on your implementation to define
* options and arguments (however, if you don't like that, you could override
* the {@link #main(List, Locale, InputStream, PrintStream, PrintStream)} method directly.
*
* <li>
* stdin, stdout, stderr are remoted, so proper buffering is necessary for good user experience.
*
* </ul>
*
* @author Kohsuke Kawaguchi
* @since 1.302
* @see CLIMethod
*/
@LegacyInstancesAreScopedToHudson
public abstract class CLICommand implements ExtensionPoint, Cloneable {
    /**
     * Connected to stdout and stderr of the CLI agent that initiated the session.
     * IOW, if you write to these streams, the person who launched the CLI command
     * will see the messages in his terminal.
     *
     * <p>
     * (In contrast, calling {@code System.out.println(...)} would print out
     * the message to the server log file, which is probably not what you want.)
     */
    public transient PrintStream stdout, stderr;
    /**
     * Shared text, which is reported back to CLI if an error happens in commands
     * taking lists of parameters.
     * @since 2.26
     */
    static final String CLI_LISTPARAM_SUMMARY_ERROR_TEXT = "Error occurred while performing this command, see previous stderr output.";
    /**
     * Connected to stdin of the CLI agent.
     *
     * <p>
     * This input stream is buffered to hide the latency in the remoting.
     */
    public transient InputStream stdin;
    /**
     * @deprecated No longer used.
     */
    @Deprecated
    public transient Channel channel;
    /**
     * The locale of the client. Messages should be formatted with this resource.
     */
    public transient Locale locale;
    /**
     * The encoding of the client, if defined.
     */
    private transient @CheckForNull Charset encoding;
    /**
     * Set by the caller of the CLI system if the transport already provides
     * authentication.
     */
    private transient Authentication transportAuth;
    /**
     * Gets the command name.
     *
     * <p>
     * For example, if the CLI is invoked as {@code java -jar cli.jar foo arg1 arg2 arg4},
     * on the server side {@link CLICommand} that returns "foo" from {@link #getName()}
     * will be invoked.
     *
     * <p>
     * By default, this method creates "foo-bar-zot" from "FooBarZotCommand".
     */
    public String getName() {
        String name = getClass().getName();
        name = name.substring(name.lastIndexOf('.') + 1); // short name
        name = name.substring(name.lastIndexOf('$') + 1); // drop enclosing class for nested commands
        if (name.endsWith("Command"))
            name = name.substring(0, name.length() - 7); // trim off the "Command" suffix
        // convert "FooBarZot" into "foo-bar-zot"
        // Locale is fixed so that "CreateInstance" always become "create-instance" no matter where this is run.
        return name.replaceAll("([a-z0-9])([A-Z])", "$1-$2").toLowerCase(Locale.ENGLISH);
    }
    /**
     * Gets the quick summary of what this command does.
     * Used by the help command to generate the list of commands.
     */
    public abstract String getShortDescription();
    /**
     * Entry point to the CLI command.
     *
     * <p>
     * The default implementation uses args4j to parse command line arguments and call {@link #run()},
     * but if that processing is undesirable, subtypes can directly override this method and leave {@link #run()}
     * to an empty method.
     * You would however then have to consider {@link #getTransportAuthentication2},
     * so this is not really recommended.
     *
     * @param args
     *      Arguments to the sub command. For example, if the CLI is invoked like "java -jar cli.jar foo bar zot",
     *      then "foo" is the sub-command and the argument list is ["bar","zot"].
     * @param locale
     *      Locale of the client (which can be different from that of the server.) Good behaving command implementation
     *      would use this locale for formatting messages.
     * @param stdin
     *      Connected to the stdin of the CLI client.
     * @param stdout
     *      Connected to the stdout of the CLI client.
     * @param stderr
     *      Connected to the stderr of the CLI client.
     * @return
     *      Exit code from the CLI command execution
     *      <table>
     *      <caption>Jenkins standard exit codes from CLI</caption>
     *      <tr><th>Code</th><th>Definition</th></tr>
     *      <tr><td>0</td><td>everything went well.</td></tr>
     *      <tr><td>1</td><td>further unspecified exception is thrown while performing the command.</td></tr>
     *      <tr><td>2</td><td>{@link CmdLineException} is thrown while performing the command.</td></tr>
     *      <tr><td>3</td><td>{@link IllegalArgumentException} is thrown while performing the command.</td></tr>
     *      <tr><td>4</td><td>{@link IllegalStateException} is thrown while performing the command.</td></tr>
     *      <tr><td>5</td><td>{@link AbortException} is thrown while performing the command.</td></tr>
     *      <tr><td>6</td><td>{@link AccessDeniedException} is thrown while performing the command.</td></tr>
     *      <tr><td>7</td><td>{@link BadCredentialsException} is thrown while performing the command.</td></tr>
     *      <tr><td>8-15</td><td>are reserved for future usage.</td></tr>
     *      <tr><td>16+</td><td>a custom CLI exit error code (meaning defined by the CLI command itself)</td></tr>
     *      </table>
     *      Note: For details - see JENKINS-32273
     */
    public int main(List<String> args, Locale locale, InputStream stdin, PrintStream stdout, PrintStream stderr) {
        this.stdin = new BufferedInputStream(stdin);
        this.stdout = stdout;
        this.stderr = stderr;
        this.locale = locale;
        registerOptionHandlers();
        CmdLineParser p = getCmdLineParser();
        // add options from the authenticator
        SecurityContext sc = null;
        Authentication old = null;
        Authentication auth;
        try {
            // TODO as in CLIRegisterer this may be doing too much work
            // Temporarily install the transport authentication for the duration of
            // this command; the previous Authentication is restored in the finally block.
            sc = SecurityContextHolder.getContext();
            old = sc.getAuthentication();
            sc.setAuthentication(auth = getTransportAuthentication2());
            // help/who-am-i must remain usable without Overall/Read permission
            if (!(this instanceof HelpCommand || this instanceof WhoAmICommand))
                Jenkins.get().checkPermission(Jenkins.READ);
            p.parseArgument(args.toArray(new String[0]));
            LOGGER.log(Level.FINE, "Invoking CLI command {0}, with {1} arguments, as user {2}.",
                    new Object[] {getName(), args.size(), auth.getName()});
            int res = run();
            LOGGER.log(Level.FINE, "Executed CLI command {0}, with {1} arguments, as user {2}, return code {3}",
                    new Object[] {getName(), args.size(), auth.getName(), res});
            return res;
        } catch (CmdLineException e) {
            logFailedCommandAndPrintExceptionErrorMessage(args, e);
            printUsage(stderr, p);
            return 2;
        } catch (IllegalStateException e) {
            logFailedCommandAndPrintExceptionErrorMessage(args, e);
            return 4;
        } catch (IllegalArgumentException e) {
            logFailedCommandAndPrintExceptionErrorMessage(args, e);
            return 3;
        } catch (AbortException e) {
            logFailedCommandAndPrintExceptionErrorMessage(args, e);
            return 5;
        } catch (AccessDeniedException e) {
            logFailedCommandAndPrintExceptionErrorMessage(args, e);
            return 6;
        } catch (BadCredentialsException e) {
            // to the caller, we can't reveal whether the user didn't exist or the password didn't match.
            // do that to the server log instead
            String id = UUID.randomUUID().toString();
            logAndPrintError(e, "Bad Credentials. Search the server log for " + id + " for more details.",
                    "CLI login attempt failed: " + id, Level.INFO);
            return 7;
        } catch (Throwable e) {
            String errorMsg = "Unexpected exception occurred while performing " + getName() + " command.";
            logAndPrintError(e, errorMsg, errorMsg, Level.WARNING);
            Functions.printStackTrace(e, stderr);
            return 1;
        } finally {
            if (sc != null)
                sc.setAuthentication(old); // restore
        }
    }
    // Logs the failure at FINE (expected user errors) and echoes the exception message to the client.
    private void logFailedCommandAndPrintExceptionErrorMessage(List<String> args, Throwable e) {
        Authentication auth = getTransportAuthentication2();
        String logMessage = String.format("Failed call to CLI command %s, with %d arguments, as user %s.",
                getName(), args.size(), auth != null ? auth.getName() : "<unknown>");
        logAndPrintError(e, e.getMessage(), logMessage, Level.FINE);
    }
    // Writes the full detail to the server log and a short message to the client's stderr.
    private void logAndPrintError(Throwable e, String errorMessage, String logMessage, Level logLevel) {
        LOGGER.log(logLevel, logMessage, e);
        this.stderr.println();
        this.stderr.println("ERROR: " + errorMessage);
    }
    /**
     * Get parser for this command.
     *
     * Exposed to be overridden by {@link hudson.cli.declarative.CLIRegisterer}.
     * @since 1.538
     */
    protected CmdLineParser getCmdLineParser() {
        return new CmdLineParser(this);
    }
    /**
     * @deprecated Specific to Remoting-based protocol.
     */
    @Deprecated
    public Channel checkChannel() throws AbortException {
        throw new AbortException("This command is requesting the -remoting mode which is no longer supported. See https://www.jenkins.io/redirect/cli-command-requires-channel");
    }
    /**
     * Returns the identity of the client as determined at the CLI transport level.
     *
     * <p>
     * When the CLI connection to the server is tunneled over HTTP, that HTTP connection
     * can authenticate the client, just like any other HTTP connections to the server
     * can authenticate the client. This method returns that information, if one is available.
     * By generalizing it, this method returns the identity obtained at the transport-level authentication.
     *
     * <p>
     * For example, imagine if the current {@link SecurityRealm} is doing Kerberos authentication,
     * then this method can return a valid identity of the client.
     *
     * <p>
     * If the transport doesn't do authentication, this method returns {@link jenkins.model.Jenkins#ANONYMOUS2}.
     * @since 2.266
     */
    public Authentication getTransportAuthentication2() {
        Authentication a = transportAuth;
        if (a == null) a = Jenkins.ANONYMOUS2;
        return a;
    }
    /**
     * @deprecated use {@link #getTransportAuthentication2}
     */
    @Deprecated
    public org.acegisecurity.Authentication getTransportAuthentication() {
        return org.acegisecurity.Authentication.fromSpring(getTransportAuthentication2());
    }
    /**
     * Sets the transport-level authentication; see {@link #getTransportAuthentication2}.
     * @since 2.266
     */
    public void setTransportAuth2(Authentication transportAuth) {
        this.transportAuth = transportAuth;
    }
    /**
     * @deprecated use {@link #setTransportAuth2}
     */
    @Deprecated
    public void setTransportAuth(org.acegisecurity.Authentication transportAuth) {
        setTransportAuth2(transportAuth.toSpring());
    }
    /**
     * Executes the command, and return the exit code.
     *
     * <p>
     * This is an internal contract between {@link CLICommand} and its subtype.
     * To execute CLI method from outside, use {@link #main(List, Locale, InputStream, PrintStream, PrintStream)}
     *
     * @return
     *      0 to indicate a success, otherwise a custom error code.
     *      Error codes 1-15 shouldn't be used in {@link #run()} as a custom error code.
     * @throws Exception
     *      If a further unspecified exception is thrown; means: Unknown and/or unexpected issue occurred
     * @throws CmdLineException
     *      If a wrong parameter specified, input value can't be decoded etc.
     * @throws IllegalArgumentException
     *      If the execution can't continue due to wrong input parameter (job doesn't exist etc.)
     * @throws IllegalStateException
     *      If the execution can't continue due to an incorrect state of Jenkins, job, build etc.
     * @throws AbortException
     *      If the execution can't continue due to an other (rare, but foreseeable) issue
     * @throws AccessDeniedException
     *      If the caller doesn't have sufficient rights for requested action
     * @throws BadCredentialsException
     *      If bad credentials were provided to CLI
     */
    protected abstract int run() throws Exception;
    // Prints "java -jar jenkins-cli.jar <name> <args>" followed by the summary and option help.
    protected void printUsage(PrintStream stderr, CmdLineParser p) {
        stderr.print("java -jar jenkins-cli.jar " + getName());
        p.printSingleLineUsage(stderr);
        stderr.println();
        printUsageSummary(stderr);
        p.printUsage(stderr);
    }
    /**
     * Get single line summary as a string.
     */
    @Restricted(NoExternalUse.class)
    public final String getSingleLineSummary() {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        getCmdLineParser().printSingleLineUsage(out);
        try {
            return out.toString(getClientCharset().name());
        } catch (UnsupportedEncodingException e) {
            throw new AssertionError(e); // client charset came from Charset, so its name is always valid
        }
    }
    /**
     * Get usage as a string.
     */
    @Restricted(NoExternalUse.class)
    public final String getUsage() {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        getCmdLineParser().printUsage(out);
        try {
            return out.toString(getClientCharset().name());
        } catch (UnsupportedEncodingException e) {
            throw new AssertionError(e);
        }
    }
    /**
     * Get long description as a string.
     */
    @Restricted(NoExternalUse.class)
    public final String getLongDescription() {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        PrintStream ps;
        try {
            ps = new PrintStream(out, false, getClientCharset().name());
        } catch (UnsupportedEncodingException e) {
            throw new AssertionError(e);
        }
        printUsageSummary(ps);
        ps.close();
        try {
            return out.toString(getClientCharset().name());
        } catch (UnsupportedEncodingException e) {
            throw new AssertionError(e);
        }
    }
    /**
     * Called while producing usage. This is a good method to override
     * to render the general description of the command that goes beyond
     * a single-line summary.
     */
    protected void printUsageSummary(PrintStream stderr) {
        stderr.println(getShortDescription());
    }
    /**
     * Convenience method for subtypes to obtain the system property of the client.
     * @deprecated Specific to Remoting-based protocol.
     */
    @Deprecated
    protected String getClientSystemProperty(String name) throws IOException, InterruptedException {
        checkChannel();
        return null; // never run
    }
    /**
     * Define the encoding for the command.
     * @since 2.54
     */
    public void setClientCharset(@NonNull Charset encoding) {
        this.encoding = encoding;
    }
    protected @NonNull Charset getClientCharset() {
        if (encoding != null) {
            return encoding;
        }
        // for SSH, assume the platform default encoding
        // this is in-line with the standard SSH behavior
        return Charset.defaultCharset();
    }
    /**
     * Convenience method for subtypes to obtain environment variables of the client.
     * @deprecated Specific to Remoting-based protocol.
     */
    @Deprecated
    protected String getClientEnvironmentVariable(String name) throws IOException, InterruptedException {
        checkChannel();
        return null; // never run
    }
    /**
     * Creates a clone to be used to execute a command.
     */
    protected CLICommand createClone() {
        try {
            return getClass().getDeclaredConstructor().newInstance();
        } catch (NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException e) {
            throw new LinkageError(e.getMessage(), e);
        }
    }
    /**
     * Auto-discovers {@link OptionHandler}s and add them to the given command line parser.
     */
    protected void registerOptionHandlers() {
        try {
            for (Class c : Index.list(OptionHandlerExtension.class, Jenkins.get().pluginManager.uberClassLoader, Class.class)) {
                Type t = Types.getBaseClass(c, OptionHandler.class);
                CmdLineParser.registerHandler(Types.erasure(Types.getTypeArgument(t, 0)), c);
            }
        } catch (IOException e) {
            throw new Error(e);
        }
    }
    /**
     * Returns all the registered {@link CLICommand}s.
     */
    public static ExtensionList<CLICommand> all() {
        return ExtensionList.lookup(CLICommand.class);
    }
    /**
     * Obtains a copy of the command for invocation.
     *
     * @return a fresh clone of the matching command, or {@code null} if no
     *         registered command has the given name
     */
    public static CLICommand clone(String name) {
        for (CLICommand cmd : all())
            if (name.equals(cmd.getName()))
                return cmd.createClone();
        return null;
    }
    private static final Logger LOGGER = Logger.getLogger(CLICommand.class.getName());
    // Tracks the command being executed by the current thread, if any.
    private static final ThreadLocal<CLICommand> CURRENT_COMMAND = new ThreadLocal<>();
    /*package*/ static CLICommand setCurrent(CLICommand cmd) {
        CLICommand old = getCurrent();
        CURRENT_COMMAND.set(cmd);
        return old; // returned so the caller can restore the previous command when done
    }
    /**
     * If the calling thread is in the middle of executing a CLI command, return it. Otherwise null.
     */
    public static CLICommand getCurrent() {
        return CURRENT_COMMAND.get();
    }
    static {
        // register option handlers that are defined
        ClassLoaders cls = new ClassLoaders();
        Jenkins j = Jenkins.getInstanceOrNull();
        if (j != null) { // only when running on the controller
            cls.put(j.getPluginManager().uberClassLoader);
            // discover OptionHandler implementations via commons-discovery service files
            ResourceNameIterator servicesIter =
                new DiscoverServiceNames(cls).findResourceNames(OptionHandler.class.getName());
            final ResourceClassIterator itr =
                new DiscoverClasses(cls).findResourceClasses(servicesIter);
            while (itr.hasNext()) {
                Class h = itr.nextResourceClass().loadClass();
                Class c = Types.erasure(Types.getTypeArgument(Types.getBaseClass(h, OptionHandler.class), 0));
                CmdLineParser.registerHandler(c, h);
            }
        }
    }
}
| |
/* Copyright 2004-2005 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.grails.commons;
import grails.util.GrailsNameUtils;
import java.beans.PropertyDescriptor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.ClassUtils;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.codehaus.groovy.grails.plugins.DomainClassGrailsPlugin;
import org.codehaus.groovy.grails.validation.ConstrainedProperty;
import org.springframework.validation.Validator;
/**
* Represents a property of a domain class and contains meta information about the
* properties relationships, naming conventions and type.
*
* @author Graeme Rocher
* @since 0.1
*/
public class DefaultGrailsDomainClassProperty implements GrailsDomainClassProperty {
private GrailsDomainClass domainClass;
private boolean persistant = true; // persistant by default
private boolean identity;
private boolean oneToMany;
private String name;
private Class<?> type;
private boolean manyToMany;
private boolean manyToOne;
private boolean oneToOne;
private boolean hasOne = false;
private boolean bidirectional;
private boolean derived = false;
private Class<?> referencedPropertyType;
private GrailsDomainClass referencedDomainClass;
private GrailsDomainClassProperty otherSide;
private String naturalName;
private boolean inherited;
private int fetchMode = FETCH_LAZY;
private boolean owningSide;
private String referencePropertyName;
private boolean embedded;
private GrailsDomainClass component;
private boolean basicCollectionType;
/**
 * Constructor. Derives the property's name, natural name, type and identity
 * flag from the bean descriptor, then (when attached to a domain class)
 * resolves inheritance, transience and fetch mode.
 *
 * @param domainClass the owning domain class; may be {@code null}, in which
 *                    case transience and fetch mode are not evaluated
 * @param descriptor  the JavaBean property descriptor backing this property
 */
@SuppressWarnings("rawtypes")
public DefaultGrailsDomainClassProperty(GrailsDomainClass domainClass, PropertyDescriptor descriptor) {
    this.domainClass = domainClass;
    name = descriptor.getName();
    naturalName = GrailsNameUtils.getNaturalName(descriptor.getName());
    type = descriptor.getPropertyType();
    identity = descriptor.getName().equals(IDENTITY);
    // establish if property is persistant
    if (domainClass != null) {
        // figure out if this property is inherited (only possible for non-root classes)
        if (!domainClass.isRoot()) {
            inherited = GrailsClassUtils.isPropertyInherited(domainClass.getClazz(), name);
        }
        List transientProps = getTransients();
        checkIfTransient(transientProps);
        establishFetchMode();
    }
}
/**
 * Evaluates the fetch mode from the domain class's static {@code fetchMode}
 * map. A value of {@code "eager"} for this property switches to eager
 * fetching; otherwise the default (lazy) is kept.
 */
@SuppressWarnings("rawtypes")
private void establishFetchMode() {
    Map fetchMap = domainClass.getPropertyValue(GrailsDomainClassProperty.FETCH_MODE, Map.class);
    if (fetchMap == null) {
        return;
    }
    if ("eager".equals(fetchMap.get(name))) {
        fetchMode = FETCH_EAGER;
    }
}
/**
 * Marks this property non-persistent if its name appears in the supplied
 * transient-property list.
 *
 * @param transientProps the transient property names; may be {@code null}
 */
@SuppressWarnings("rawtypes")
private void checkIfTransient(List transientProps) {
    if (transientProps == null) {
        return;
    }
    for (Object entry : transientProps) {
        // only String entries are honoured; anything else is silently skipped
        if (!(entry instanceof String)) {
            continue;
        }
        if (name.equals(entry)) {
            // listed as transient, so it must not be persisted
            persistant = false;
            break;
        }
    }
}
/**
 * Collects the transient property names declared anywhere in the domain
 * class hierarchy, from both the {@code transients} and the legacy
 * {@code evanescent} static lists.
 *
 * @return the accumulated transient property names (possibly empty)
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
private List getTransients() {
    List collected = new ArrayList();
    for (GrailsDomainClass dc : resolveAllDomainClassesInHierarchy()) {
        List declaredTransients = dc.getPropertyValue(TRANSIENT, List.class);
        if (declaredTransients != null) {
            collected.addAll(declaredTransients);
        }
        // Undocumented feature alert! Steve insisted on this :-)
        List declaredEvanescent = dc.getPropertyValue(EVANESCENT, List.class);
        if (declaredEvanescent != null) {
            collected.addAll(declaredEvanescent);
        }
    }
    return collected;
}
/**
 * Returns this property's domain class followed by every superclass that is
 * itself a registered domain class, walking up via the application's
 * artefact registry.
 *
 * @return the domain class hierarchy, starting with the current class
 */
private List<GrailsDomainClass> resolveAllDomainClassesInHierarchy() {
    List<GrailsDomainClass> allClasses = new ArrayList<GrailsDomainClass>();
    GrailsApplication application = ApplicationHolder.getApplication();
    GrailsDomainClass currentDomainClass = domainClass;
    while (currentDomainClass != null) {
        allClasses.add(currentDomainClass);
        // Class.getSuperclass() returns null for Object, interfaces and
        // primitives; guard so we don't NPE on superClass.getName() below.
        Class<?> superClass = currentDomainClass.getClazz().getSuperclass();
        if (application == null || superClass == null) {
            // no registry to consult, or we've reached the top of the hierarchy
            currentDomainClass = null;
        }
        else {
            currentDomainClass = (GrailsDomainClass)application.getArtefact(
                    DomainClassArtefactHandler.TYPE, superClass.getName());
        }
    }
    return allClasses;
}
/** @return the property name as derived from the bean descriptor */
public String getName() {
    return name;
}
/** @return the declared Java type of the property */
@SuppressWarnings("rawtypes")
public Class getType() {
    return type;
}
/**
 * @return {@code true} unless this property was listed in the class's
 *         {@code transients} (or legacy {@code evanescent}) list
 */
public boolean isPersistent() {
    return persistant;
}
/**
 * A property is optional when a constraint exists for it and that constraint
 * allows {@code null}; properties without constraints are reported required.
 */
public boolean isOptional() {
    ConstrainedProperty constrainedProperty = (ConstrainedProperty) domainClass.getConstrainedProperties().get(name);
    return (constrainedProperty != null) && constrainedProperty.isNullable();
}
/** @return {@code true} if this property is the identity (id) property */
public boolean isIdentity() {
    return identity;
}
/** @return {@code true} for a one-to-many association */
public boolean isOneToMany() {
    return oneToMany;
}
/** @return {@code true} for a many-to-one association */
public boolean isManyToOne() {
    return manyToOne;
}
/** @return the property name upper-cased, for use as a field/constant name */
public String getFieldName() {
    return getName().toUpperCase();
}
/** @return {@code true} for a one-to-one association */
public boolean isOneToOne() {
    return oneToOne;
}
/** @return the domain class this property belongs to */
public GrailsDomainClass getDomainClass() {
    return domainClass;
}
/** @return {@code true} for a many-to-many association */
public boolean isManyToMany() {
    return manyToMany;
}
/**
 * @param manyToMany The manyToMany to set.
 */
protected void setManyToMany(boolean manyToMany) {
    this.manyToMany = manyToMany;
}
/**
 * @param oneToMany The oneToMany to set.
 */
protected void setOneToMany(boolean oneToMany) {
    this.oneToMany = oneToMany;
}
/**
 * @param manyToOne The manyToOne to set.
 */
protected void setManyToOne(boolean manyToOne) {
    this.manyToOne = manyToOne;
}
/**
 * @param oneToOne The oneToOne to set.
 */
protected void setOneToOne(boolean oneToOne) {
    this.oneToOne = oneToOne;
}
/**
 * Set whether the foreign key is stored in the parent or child in a one-to-one.
 * @param isHasOne True if it's stored in the parent
 */
protected void setHasOne(boolean isHasOne) {
    this.hasOne = isHasOne;
}
/**
 * @return True if the foreign key in a one-to-one is stored in the parent
 */
public boolean isHasOne() {
    return hasOne;
}
/**
 * @param persistant whether this property should be persisted
 */
protected void setPersistant(boolean persistant) {
    this.persistant = persistant;
}
/**
 * Sets whether the relationship is bidirectional or not.
 */
protected void setBidirectional(boolean bidirectional) {
    this.bidirectional = bidirectional;
}
/**
 * Returns the short class name of the property type with its first letter
 * lower-cased, e.g. {@code "BookAuthor"} becomes {@code "bookAuthor"}.
 * Locale.ENGLISH keeps the result stable regardless of the default locale.
 */
public String getTypePropertyName() {
    String shortTypeName = ClassUtils.getShortClassName(type);
    return shortTypeName.substring(0,1).toLowerCase(Locale.ENGLISH) + shortTypeName.substring(1);
}
/**
 * For a collection/map association whose element type has been resolved,
 * returns that referenced type; otherwise returns the property's own type.
 */
@SuppressWarnings("rawtypes")
public Class getReferencedPropertyType() {
    if (isDomainAssociation()) {
        return referencedPropertyType;
    }
    return getType();
}
// True when the property is a Collection or Map and a referenced element type is known.
private boolean isDomainAssociation() {
    return (Collection.class.isAssignableFrom(type) || Map.class.isAssignableFrom(type)) &&
        referencedPropertyType != null;
}
/** @return {@code true} when this association has a mapped inverse side */
public boolean isBidirectional() {
    return bidirectional;
}
/**
 * Sets the referenced property type of this property.
 */
protected void setReferencedPropertyType(Class<?> referencedPropertyType) {
    this.referencedPropertyType = referencedPropertyType;
}
/** @return the domain class this association refers to, or {@code null} */
public GrailsDomainClass getReferencedDomainClass() {
    return referencedDomainClass;
}
/**
 * Sets the referenced domain class and, as a side effect, the referenced
 * property type to that class's Java class. A {@code null} argument is
 * silently ignored, leaving the previous values untouched.
 */
public void setReferencedDomainClass(GrailsDomainClass referencedDomainClass) {
    if (referencedDomainClass != null) {
        this.referencedDomainClass = referencedDomainClass;
        this.referencedPropertyType = referencedDomainClass.getClazz();
    }
}
/**
 * @return {@code true} when this property takes part in any relationship
 *         (one/many-to-one/many) or is an embedded component
 */
public boolean isAssociation() {
    return isOneToMany() ||
           isOneToOne() ||
           isManyToOne() ||
           isManyToMany() ||
           isEmbedded();
}
/** @return {@code true} when the property type is a Java 5 enum */
public boolean isEnum() {
    return GrailsClassUtils.isJdk5Enum(getType());
}
/** @return the human-readable ("natural") form of the property name */
public String getNaturalName() {
    return naturalName;
}
/**
 * Diagnostic representation including the association type (checked in
 * precedence order: many-to-many, one-to-many, one-to-one, many-to-one,
 * embedded; {@code null} when the property is not an association).
 */
@Override
public String toString() {
    String assType = null;
    if (isManyToMany()) {
        assType = "many-to-many";
    }
    else if (isOneToMany()) {
        assType = "one-to-many";
    }
    else if (isOneToOne()) {
        assType = "one-to-one";
    }
    else if (isManyToOne()) {
        assType = "many-to-one";
    }
    else if (isEmbedded()) {
        assType = "embedded";
    }
    return new ToStringBuilder(this)
            .append("name", name)
            .append("type", type)
            .append("persistent", isPersistent())
            .append("optional", isOptional())
            .append("association", isAssociation())
            .append("bidirectional", isBidirectional())
            .append("association-type", assType)
            .toString();
}
/** @return the inverse side of this association, or {@code null} */
public GrailsDomainClassProperty getOtherSide() {
    return otherSide;
}
/**
 * Wires up the inverse side of this association. Setting a property other
 * than this one marks the relationship bidirectional, and if this side was
 * classified one-to-one while the other side is one-to-many, this side is
 * re-classified as many-to-one.
 */
public void setOtherSide(GrailsDomainClassProperty property) {
    if (!equals(property)) {
        setBidirectional(true);
        if (isOneToOne() && property.isOneToMany()) {
            setOneToOne(false);
            setManyToOne(true);
        }
    }
    otherSide = property;
}
/** @return {@code true} when the property is declared on a superclass */
public boolean isInherited() {
    return inherited;
}
/** @return {@link #FETCH_EAGER} or the default {@link #FETCH_LAZY} */
public int getFetchMode() {
    return fetchMode;
}
/** A hasOne side always owns the association; otherwise the explicit flag decides. */
public boolean isOwningSide() {
    return isHasOne() || owningSide;
}
public void setOwningSide(boolean b) {
    owningSide = b;
}
/**
 * A property is circular when the class it points back to (the other side's
 * domain class, or the referenced property type when no other side is set)
 * is assignable from the owning domain class.
 */
@SuppressWarnings("unchecked")
public boolean isCircular() {
    Class<?> pointedTo = (otherSide != null)
            ? otherSide.getDomainClass().getClazz()
            : getReferencedPropertyType();
    return pointedTo.isAssignableFrom(domainClass.getClazz());
}
public void setReferencePropertyName(String name) {
    referencePropertyName = name;
}
// NOTE(review): getter is named getReferencedPropertyName while the setter and
// field use "referencePropertyName" (no "d") — interface-imposed asymmetry.
public String getReferencedPropertyName() {
    return referencePropertyName;
}
/** @return {@code true} when this property is an embedded component */
public boolean isEmbedded() {
    return embedded;
}
/** @return the pseudo domain class for an embedded component, or {@code null} */
public GrailsDomainClass getComponent() {
    return component;
}
/**
 * Marks the property embedded; enabling it also builds a
 * {@link ComponentDomainClass} wrapper around the property's type.
 */
public void setEmbedded(boolean isEmbedded) {
    embedded = isEmbedded;
    if (isEmbedded) {
        component = new ComponentDomainClass(getType());
    }
}
/** @return {@code true} when the value is derived (e.g. a database formula) */
public boolean isDerived() {
    return derived;
}
public void setDerived(boolean derived) {
    this.derived = derived;
}
/**
 * Overridden equals to take into account inherited properties
 * e.g. childClass.propertyName is equal to parentClass.propertyName if the types match and
 * childClass.property.isInherited
 *
 * <p>NOTE(review): no matching {@code hashCode()} override is visible in this
 * class, so equal-but-not-identical properties may land in different hash
 * buckets — confirm against the rest of the file.</p>
 *
 * @param o the Object to compare this property to
 * @return boolean indicating equality of the two objects
 */
@Override
public boolean equals(Object o) {
    if (o == null) {
        return false;
    }
    if (o instanceof GrailsDomainClassProperty) {
        // identical references (super.equals == identity) are trivially equal;
        // otherwise compare name, referenced type and class-hierarchy compatibility
        if (!super.equals(o)){
            GrailsDomainClassProperty otherProp = (GrailsDomainClassProperty) o;
            boolean namesMatch = otherProp.getName().equals(getName());
            boolean typesMatch = otherProp.getReferencedPropertyType().equals(getReferencedPropertyType());
            Class<?> myActualClass = getDomainClass().getClazz();
            Class<?> otherActualClass = otherProp.getDomainClass().getClazz() ;
            // either class may be the subclass: covers inherited properties in both directions
            boolean classMatch = otherActualClass.isAssignableFrom(myActualClass) ||
                myActualClass.isAssignableFrom(otherActualClass);
            return namesMatch && typesMatch && classMatch;
        }
        return true;
    }
    return false;
}
// A "basic" collection holds simple values (strings, numbers) rather than domain instances.
public void setBasicCollectionType(boolean b) {
    basicCollectionType = b;
}
public boolean isBasicCollectionType() {
    return basicCollectionType;
}
@SuppressWarnings("rawtypes")
private class ComponentDomainClass extends AbstractGrailsClass implements GrailsDomainClass {
private GrailsDomainClassProperty[] properties;
private Map constraints = Collections.emptyMap();
private List transients = Collections.emptyList();
public ComponentDomainClass(Class<?> type) {
super(type, "");
PropertyDescriptor[] descriptors = getPropertyDescriptors();
List tmp = getPropertyValue(GrailsDomainClassProperty.TRANSIENT, List.class);
if (tmp != null) transients = tmp;
properties = createDomainClassProperties(descriptors);
constraints = GrailsDomainConfigurationUtil.evaluateConstraints(getClazz(), properties);
DomainClassGrailsPlugin.registerConstraintsProperty(getMetaClass(), this);
}
private GrailsDomainClassProperty[] createDomainClassProperties(PropertyDescriptor[] descriptors) {
List<DefaultGrailsDomainClassProperty> props = new ArrayList<DefaultGrailsDomainClassProperty>();
Collection<String> embeddedNames = getEmbeddedList();
for (int i = 0; i < descriptors.length; i++) {
PropertyDescriptor descriptor = descriptors[i];
if (isPersistentProperty(descriptor)) {
DefaultGrailsDomainClassProperty property = new DefaultGrailsDomainClassProperty(
this, descriptor);
props.add(property);
if (embeddedNames.contains(property.getName())) {
property.setEmbedded(true);
}
}
}
return props.toArray(new GrailsDomainClassProperty[props.size()]);
}
@SuppressWarnings("unchecked")
private Collection<String> getEmbeddedList() {
Object potentialList = GrailsClassUtils.getStaticPropertyValue(getClazz(), "embedded");
if (potentialList instanceof Collection) {
return (Collection<String>)potentialList;
}
return Collections.emptyList();
}
private boolean isPersistentProperty(PropertyDescriptor descriptor) {
String propertyName = descriptor.getName();
return GrailsDomainConfigurationUtil.isNotConfigurational(descriptor) && !transients.contains(propertyName);
}
public boolean isOwningClass(Class dc) {
return dc != null && dc.equals(getDomainClass().getClazz());
}
public GrailsDomainClassProperty[] getProperties() {
return properties;
}
/**
* @deprecated Use #getPersistentProperties instead
*/
@SuppressWarnings("dep-ann")
public GrailsDomainClassProperty[] getPersistantProperties() {
return properties;
}
public GrailsDomainClassProperty[] getPersistentProperties() {
return properties;
}
public GrailsDomainClassProperty getIdentifier() {
return null; // no identifier for embedded component
}
public GrailsDomainClassProperty getVersion() {
return null; // no version for embedded component
}
public Map getAssociationMap() {
return Collections.emptyMap();
}
public GrailsDomainClassProperty getPropertyByName(@SuppressWarnings("hiding") String name) {
for (int i = 0; i < properties.length; i++) {
GrailsDomainClassProperty property = properties[i];
if (property.getName().equals(name)) return property;
}
return null;
}
public String getFieldName(String propertyName) {
return null;
}
public boolean isOneToMany(String propertyName) {
return false;
}
public boolean isManyToOne(String propertyName) {
return false;
}
public boolean isBidirectional(String propertyName) {
return false;
}
public Class<?> getRelatedClassType(String propertyName) {
return getPropertyByName(propertyName).getReferencedPropertyType();
}
public Map getConstrainedProperties() {
return constraints;
}
public Validator getValidator() {
return null;
}
public void setValidator(Validator validator) {
// ignored
}
public String getMappingStrategy() {
return GrailsDomainClass.GORM;
}
public boolean isRoot() {
return true;
}
@SuppressWarnings("unchecked")
public Set getSubClasses() {
return Collections.emptySet();
}
public void refreshConstraints() {
GrailsDomainClassProperty[] props = getPersistentProperties();
constraints = GrailsDomainConfigurationUtil.evaluateConstraints(
getClazz(),
props);
}
public boolean hasSubClasses() {
return false;
}
public Map getMappedBy() {
return Collections.emptyMap();
}
public boolean hasPersistentProperty(String propertyName) {
for (int i = 0; i < properties.length; i++) {
GrailsDomainClassProperty persistantProperty = properties[i];
if (persistantProperty.getName().equals(propertyName)) return true;
}
return false;
}
public void setMappingStrategy(String strategy) {
// do nothing
}
}
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.gemstone.gemfire.admin.jmx.internal;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.modelmbean.ModelMBean;
import org.apache.commons.modeler.ManagedBean;
import com.gemstone.gemfire.SystemFailure;
import com.gemstone.gemfire.admin.AdminException;
import com.gemstone.gemfire.admin.SystemMemberCacheServer;
import com.gemstone.gemfire.admin.SystemMemberRegion;
import com.gemstone.gemfire.admin.internal.SystemMemberBridgeServerImpl;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.internal.LogWriterImpl;
import com.gemstone.gemfire.internal.admin.AdminBridgeServer;
import com.gemstone.gemfire.internal.admin.GemFireVM;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
/**
* MBean representation of {@link com.gemstone.gemfire.admin.SystemMemberCache}.
*
* @author Darrel Schneider
* @author Kirk Lund
* @since 3.5
*/
public class SystemMemberCacheJmxImpl
extends com.gemstone.gemfire.admin.internal.SystemMemberCacheImpl
implements com.gemstone.gemfire.admin.jmx.internal.ManagedResource {

  /** The object name of this managed resource */
  private ObjectName objectName;

  /** Managed resources created for the regions of this cache, keyed by region full path. */
  private final Map<String, SystemMemberRegionJmxImpl> managedRegionResourcesMap = new HashMap<String, SystemMemberRegionJmxImpl>();

  /** Managed resources created for the cache servers of this cache, keyed by bridge server id. */
  private final Map<Number, SystemMemberBridgeServerJmxImpl> managedCacheServerResourcesMap = new HashMap<Number, SystemMemberBridgeServerJmxImpl>();

  // -------------------------------------------------------------------------
  //   Constructor(s)
  // -------------------------------------------------------------------------

  /**
   * Constructs an instance of SystemMemberCacheJmxImpl.
   *
   * @param vm
   *          The vm owning the cache this object will manage
   */
  public SystemMemberCacheJmxImpl(GemFireVM vm)
  throws com.gemstone.gemfire.admin.AdminException {
    super(vm);
    initializeMBean();
  }

  /** Create and register the MBean to manage this resource */
  private void initializeMBean()
  throws com.gemstone.gemfire.admin.AdminException {
    // StringBuilder instead of StringBuffer: the builder is method-local and
    // never shared between threads, so synchronization is unnecessary.
    this.mbeanName = new StringBuilder("GemFire.Cache:")
        .append("name=")
        .append(MBeanUtil.makeCompliantMBeanNameProperty(getName()))
        .append(",id=")
        .append(getId())
        .append(",owner=")
        .append(MBeanUtil.makeCompliantMBeanNameProperty(vm.getId().toString()))
        .append(",type=Cache").toString();

    this.objectName =
      MBeanUtil.createMBean(this,
          addDynamicAttributes(MBeanUtil.lookupManagedBean(this)));
  }

  // -------------------------------------------------------------------------
  //   Template methods overriden from superclass...
  // -------------------------------------------------------------------------

  /**
   * Override createSystemMemberRegion by instantiating
   * SystemMemberRegionJmxImpl. This instance is also added to the
   * managedResources collection.
   *
   * @param r
   *          reference to Region instance for which this JMX resource is to be
   *          created
   * @return SystemMemberRegionJmxImpl - JMX Implementation of
   *         SystemMemberRegion
   * @throws AdminException
   *           if constructing SystemMemberRegionJmxImpl instance fails
   */
  @Override
  protected SystemMemberRegion createSystemMemberRegion(Region r)
    throws com.gemstone.gemfire.admin.AdminException {
    SystemMemberRegionJmxImpl managedSystemMemberRegion = null;
    boolean needsRefresh = false;
    synchronized (this.managedRegionResourcesMap) {
      /*
       * Ensuring that a single instance of System Member Region is created
       * per Region.
       */
      SystemMemberRegionJmxImpl managedResource = managedRegionResourcesMap.get(r.getFullPath());
      if (managedResource != null) {
        managedSystemMemberRegion = managedResource;
      } else {
        managedSystemMemberRegion = new SystemMemberRegionJmxImpl(this, r);
        managedRegionResourcesMap.put(r.getFullPath(), managedSystemMemberRegion);
        needsRefresh = true;
      }
    }
    // Refresh outside the synchronized block so other callers are not blocked
    // while the newly created resource loads its state.
    if (needsRefresh) {
      managedSystemMemberRegion.refresh();
    }
    return managedSystemMemberRegion;
  }

  /**
   * Creates a SystemMemberBridgeServerJmxImpl instance. This instance is also
   * added to the managedResources collection.
   *
   * @param bridge
   *          reference to AdminBridgeServer for which this JMX resource is to
   *          be created
   * @return SystemMemberBridgeServerJmxImpl - JMX Implementation of
   *         SystemMemberBridgeServerImpl
   * @throws AdminException
   *           if constructing SystemMemberBridgeServerJmxImpl instance fails
   */
  @Override
  protected SystemMemberBridgeServerImpl
    createSystemMemberBridgeServer(AdminBridgeServer bridge)
    throws AdminException {
    SystemMemberBridgeServerJmxImpl managedSystemMemberBridgeServer = null;
    synchronized (this.managedCacheServerResourcesMap) {
      /*
       * Ensuring that a single instance of SystemMember BridgeServer is
       * created per AdminBridgeServer.
       */
      SystemMemberBridgeServerJmxImpl managedCacheServerResource = managedCacheServerResourcesMap.get(bridge.getId());
      if (managedCacheServerResource != null) {
        managedSystemMemberBridgeServer = managedCacheServerResource;
      } else {
        managedSystemMemberBridgeServer = new SystemMemberBridgeServerJmxImpl(this, bridge);
        managedCacheServerResourcesMap.put(bridge.getId(), managedSystemMemberBridgeServer);
      }
    }
    return managedSystemMemberBridgeServer;
  }

  // -------------------------------------------------------------------------
  //   Create MBean attributes for each Statistic
  // -------------------------------------------------------------------------

  /**
   * Add MBean attribute definitions for each Statistic.
   *
   * @param managed the mbean definition to add attributes to
   * @return a new instance of ManagedBean copied from <code>managed</code> but
   *         with the new attributes added
   * @throws IllegalArgumentException if <code>managed</code> is null
   */
  ManagedBean addDynamicAttributes(ManagedBean managed)
  throws com.gemstone.gemfire.admin.AdminException {
    if (managed == null) {
      throw new IllegalArgumentException(LocalizedStrings.SystemMemberCacheJmxImpl_MANAGEDBEAN_IS_NULL.toLocalizedString());
    }

    refresh(); // to get the stats...

    // need to create a new instance of ManagedBean to clean the "slate"...
    ManagedBean newManagedBean = new DynamicManagedBean(managed);
    for (int i = 0; i < this.statistics.length; i++) {
      StatisticAttributeInfo attrInfo = new StatisticAttributeInfo();

      attrInfo.setName(this.statistics[i].getName());
      attrInfo.setDisplayName(this.statistics[i].getName());
      attrInfo.setDescription(this.statistics[i].getDescription());
      attrInfo.setType("java.lang.Number");

      attrInfo.setIs(false);
      attrInfo.setReadable(true);
      attrInfo.setWriteable(false);

      attrInfo.setStat(this.statistics[i]);

      newManagedBean.addAttribute(attrInfo);
    }
    return newManagedBean;
  }

  // -------------------------------------------------------------------------
  //   MBean Operations
  // -------------------------------------------------------------------------

  /**
   * Returns the ObjectName of the Region for the specified path.
   *
   * @throws AdminException
   *           If no region with path <code>path</code> exists
   */
  public ObjectName manageRegion(String path)
  throws AdminException, MalformedObjectNameException {
    try {
      SystemMemberRegionJmxImpl region = null;
      try {
        region = (SystemMemberRegionJmxImpl) getRegion(path);
      } catch (AdminException e) {
        MBeanUtil.logStackTrace(LogWriterImpl.WARNING_LEVEL, e);
        throw e;
      }
      if (region == null) {
        throw new AdminException(LocalizedStrings.SystemMemberCacheJmxImpl_THIS_CACHE_DOES_NOT_CONTAIN_REGION_0.toLocalizedString(path));
      } else {
        return ObjectName.getInstance(region.getMBeanName());
      }
    } catch (RuntimeException e) {
      MBeanUtil.logStackTrace(LogWriterImpl.WARNING_LEVEL, e);
      throw e;
    } catch (Error e) {
      if (SystemFailure.isJVMFailureError(e)) {
        SystemFailure.initiateFailure(e);
        // If this ever returns, rethrow the error. We're poisoned
        // now, so don't let this thread continue.
        throw e;
      }
      // Whenever you catch Error or Throwable, you must also
      // check for fatal JVM error (see above). However, there is
      // _still_ a possibility that you are dealing with a cascading
      // error condition, so you also need to check to see if the JVM
      // is still usable:
      SystemFailure.checkFailure();
      MBeanUtil.logStackTrace(LogWriterImpl.ERROR_LEVEL, e);
      throw e;
    }
  }

  /**
   * Creates a new cache server MBean and returns its
   * <code>ObjectName</code>.
   *
   * @since 5.7
   */
  public ObjectName manageCacheServer()
  throws AdminException, MalformedObjectNameException {
    try {
      SystemMemberBridgeServerJmxImpl bridge =
        (SystemMemberBridgeServerJmxImpl) addCacheServer();
      return ObjectName.getInstance(bridge.getMBeanName());
    } catch (AdminException e) {
      MBeanUtil.logStackTrace(LogWriterImpl.WARNING_LEVEL, e);
      throw e;
    } catch (RuntimeException e) {
      MBeanUtil.logStackTrace(LogWriterImpl.WARNING_LEVEL, e);
      throw e;
    } catch (Error e) {
      if (SystemFailure.isJVMFailureError(e)) {
        SystemFailure.initiateFailure(e);
        // If this ever returns, rethrow the error. We're poisoned
        // now, so don't let this thread continue.
        throw e;
      }
      // Whenever you catch Error or Throwable, you must also
      // check for fatal JVM error (see above). However, there is
      // _still_ a possibility that you are dealing with a cascading
      // error condition, so you also need to check to see if the JVM
      // is still usable:
      SystemFailure.checkFailure();
      MBeanUtil.logStackTrace(LogWriterImpl.ERROR_LEVEL, e);
      throw e;
    }
  }

  /**
   * Creates a new bridge server MBean and returns its
   * <code>ObjectName</code>.
   *
   * @since 4.0
   * @deprecated as of 5.7 use {@link #manageCacheServer} instead
   */
  @Deprecated
  public ObjectName manageBridgeServer()
  throws AdminException, MalformedObjectNameException {
    return manageCacheServer();
  }

  /**
   * Returns the MBean <code>ObjectName</code>s for all cache servers
   * that serve this cache to clients.
   *
   * @since 4.0
   */
  public ObjectName[] manageCacheServers()
  throws AdminException, MalformedObjectNameException {
    try {
      SystemMemberCacheServer[] bridges = getCacheServers();
      ObjectName[] names = new ObjectName[bridges.length];
      for (int i = 0; i < bridges.length; i++) {
        SystemMemberBridgeServerJmxImpl bridge =
          (SystemMemberBridgeServerJmxImpl) bridges[i];
        names[i] = ObjectName.getInstance(bridge.getMBeanName());
      }

      return names;
    } catch (AdminException e) {
      MBeanUtil.logStackTrace(LogWriterImpl.WARNING_LEVEL, e);
      throw e;
    } catch (RuntimeException e) {
      MBeanUtil.logStackTrace(LogWriterImpl.WARNING_LEVEL, e);
      throw e;
    } catch (Error e) {
      if (SystemFailure.isJVMFailureError(e)) {
        SystemFailure.initiateFailure(e);
        // If this ever returns, rethrow the error. We're poisoned
        // now, so don't let this thread continue.
        throw e;
      }
      // Whenever you catch Error or Throwable, you must also
      // check for fatal JVM error (see above). However, there is
      // _still_ a possibility that you are dealing with a cascading
      // error condition, so you also need to check to see if the JVM
      // is still usable:
      SystemFailure.checkFailure();
      MBeanUtil.logStackTrace(LogWriterImpl.ERROR_LEVEL, e);
      throw e;
    }
  }

  /**
   * Returns the MBean <code>ObjectName</code>s for all bridge servers
   * that serve this cache.
   *
   * @since 4.0
   * @deprecated as of 5.7 use {@link #manageCacheServers} instead
   */
  @Deprecated
  public ObjectName[] manageBridgeServers()
  throws AdminException, MalformedObjectNameException {
    return manageCacheServers();
  }

  // -------------------------------------------------------------------------
  //   ManagedResource implementation
  // -------------------------------------------------------------------------

  /** The name of the MBean that will manage this resource */
  private String mbeanName;

  /** The ModelMBean that is configured to manage this resource */
  private ModelMBean modelMBean;

  /** @return the name of the MBean that manages this resource */
  public String getMBeanName() {
    return this.mbeanName;
  }

  /** @return the ModelMBean configured to manage this resource */
  public ModelMBean getModelMBean() {
    return this.modelMBean;
  }
  public void setModelMBean(ModelMBean modelMBean) {
    this.modelMBean = modelMBean;
  }

  /** @return the JMX ObjectName of this managed resource */
  public ObjectName getObjectName() {
    return this.objectName;
  }

  public ManagedResourceType getManagedResourceType() {
    return ManagedResourceType.SYSTEM_MEMBER_CACHE;
  }

  /**
   * Un-registers all the statistics & cache managed resource created for this
   * member. After un-registering the resource MBean instances, clears the
   * region and cache server resource maps.
   *
   * Creates ConfigurationParameterJmxImpl, StatisticResourceJmxImpl and
   * SystemMemberCacheJmxImpl. But cleans up only StatisticResourceJmxImpl and
   * SystemMemberCacheJmxImpl which are of type ManagedResource.
   */
  public void cleanupResource() {
    synchronized (this.managedRegionResourcesMap) {
      Collection<SystemMemberRegionJmxImpl> values = managedRegionResourcesMap.values();

      for (SystemMemberRegionJmxImpl regionResource : values) {
        MBeanUtil.unregisterMBean(regionResource);
      }

      this.managedRegionResourcesMap.clear();
    }

    synchronized (this.managedCacheServerResourcesMap) {
      Collection<SystemMemberBridgeServerJmxImpl> values = managedCacheServerResourcesMap.values();

      // renamed loop variable: it previously shadowed the class name itself,
      // which violates Java naming conventions and was easy to misread
      for (SystemMemberBridgeServerJmxImpl bridgeServerResource : values) {
        MBeanUtil.unregisterMBean(bridgeServerResource);
      }

      this.managedCacheServerResourcesMap.clear();
    }
  }

  /**
   * Cleans up managed resources created for the region that was (created and)
   * destroyed in a cache represented by this Managed Resource.
   *
   * @param regionPath
   *          path of the region that got destroyed
   * @return a managed resource related to this region path
   */
  public ManagedResource cleanupRegionResources(String regionPath) {
    ManagedResource cleaned = null;

    synchronized (this.managedRegionResourcesMap) {
      Set<Entry<String, SystemMemberRegionJmxImpl>> entries = managedRegionResourcesMap.entrySet();
      // Iterator.remove is required here: removing via the map while iterating
      // its entry set would throw ConcurrentModificationException.
      for (Iterator<Entry<String, SystemMemberRegionJmxImpl>> it = entries.iterator(); it.hasNext();) {
        Entry<String, SystemMemberRegionJmxImpl> entry = it.next();
        SystemMemberRegionJmxImpl managedResource = entry.getValue();
        ObjectName objName = managedResource.getObjectName();

        String pathProp = objName.getKeyProperty("path");
        if (pathProp != null && pathProp.equals(regionPath)) {
          cleaned = managedResource;
          it.remove();
          break;
        }
      }
    }

    return cleaned;
  }

  /**
   * Checks equality of the given object with <code>this</code> based on the
   * type (Class) and the MBean Name returned by <code>getMBeanName()</code>
   * methods.
   *
   * @param obj
   *          object to check equality with
   * @return true if the given object is if the same type and its MBean Name is
   *         same as <code>this</code> object's MBean Name, false otherwise
   */
  @Override
  public boolean equals(Object obj) {
    // instanceof is false for null, so no explicit null check is needed
    if ( !(obj instanceof SystemMemberCacheJmxImpl) ) {
      return false;
    }

    SystemMemberCacheJmxImpl other = (SystemMemberCacheJmxImpl) obj;

    return this.getMBeanName().equals(other.getMBeanName());
  }

  /**
   * Returns hash code for <code>this</code> object which is based on the MBean
   * Name generated.
   *
   * @return hash code for <code>this</code> object
   */
  @Override
  public int hashCode() {
    return this.getMBeanName().hashCode();
  }
}
| |
/* Copyright (C) 2013-2014 Computer Sciences Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
package ezbake.data.jdbc;
import ezbake.base.thrift.EzSecurityToken;
import javax.inject.Provider;
import java.sql.Array;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.NClob;
import java.sql.PreparedStatement;
import java.sql.SQLClientInfoException;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Savepoint;
import java.sql.Statement;
import java.sql.Struct;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Executor;
/**
 * A {@link Connection} decorator that wraps every {@link Statement},
 * {@link PreparedStatement} and {@link CallableStatement} it creates in the
 * corresponding decorator, passing along the {@link EzSecurityToken} provider.
 * All other calls delegate directly to the underlying connection.
 *
 * <p>Both fields are now {@code final}: the decorator never reassigns them, so
 * declaring them immutable documents the intent and makes the wrapper safely
 * publishable.</p>
 */
class ConnectionDecorator implements Connection {
    /** The wrapped JDBC connection; all calls ultimately reach this object. */
    private final Connection connection;
    /** Supplies the security token for the statement decorators. */
    private final Provider<EzSecurityToken> tokenProvider;

    public ConnectionDecorator(Connection connection, Provider<EzSecurityToken> tokenProvider) {
        this.connection = connection;
        this.tokenProvider = tokenProvider;
    }

    @Override
    public Statement createStatement() throws SQLException {
        return new StatementDecorator(connection.createStatement(), tokenProvider);
    }

    @Override
    public PreparedStatement prepareStatement(String s) throws SQLException {
        return new PreparedStatementDecorator(connection.prepareStatement(s), tokenProvider);
    }

    @Override
    public CallableStatement prepareCall(String s) throws SQLException {
        return new CallableStatementDecorator(connection.prepareCall(s), tokenProvider);
    }

    @Override
    public String nativeSQL(String s) throws SQLException {
        return connection.nativeSQL(s);
    }

    @Override
    public void setAutoCommit(boolean b) throws SQLException {
        connection.setAutoCommit(b);
    }

    @Override
    public boolean getAutoCommit() throws SQLException {
        return connection.getAutoCommit();
    }

    @Override
    public void commit() throws SQLException {
        connection.commit();
    }

    @Override
    public void rollback() throws SQLException {
        connection.rollback();
    }

    @Override
    public void close() throws SQLException {
        connection.close();
    }

    @Override
    public boolean isClosed() throws SQLException {
        return connection.isClosed();
    }

    @Override
    public DatabaseMetaData getMetaData() throws SQLException {
        return connection.getMetaData();
    }

    @Override
    public void setReadOnly(boolean b) throws SQLException {
        connection.setReadOnly(b);
    }

    @Override
    public boolean isReadOnly() throws SQLException {
        return connection.isReadOnly();
    }

    @Override
    public void setCatalog(String s) throws SQLException {
        connection.setCatalog(s);
    }

    @Override
    public String getCatalog() throws SQLException {
        return connection.getCatalog();
    }

    @Override
    public void setTransactionIsolation(int i) throws SQLException {
        connection.setTransactionIsolation(i);
    }

    @Override
    public int getTransactionIsolation() throws SQLException {
        return connection.getTransactionIsolation();
    }

    @Override
    public SQLWarning getWarnings() throws SQLException {
        return connection.getWarnings();
    }

    @Override
    public void clearWarnings() throws SQLException {
        connection.clearWarnings();
    }

    @Override
    public Statement createStatement(int i, int i2) throws SQLException {
        return new StatementDecorator(connection.createStatement(i, i2), tokenProvider);
    }

    @Override
    public PreparedStatement prepareStatement(String s, int i, int i2) throws SQLException {
        return new PreparedStatementDecorator(connection.prepareStatement(s, i, i2), tokenProvider);
    }

    @Override
    public CallableStatement prepareCall(String s, int i, int i2) throws SQLException {
        return new CallableStatementDecorator(connection.prepareCall(s, i, i2), tokenProvider);
    }

    @Override
    public Map<String, Class<?>> getTypeMap() throws SQLException {
        return connection.getTypeMap();
    }

    @Override
    public void setTypeMap(Map<String, Class<?>> typeMap) throws SQLException {
        connection.setTypeMap(typeMap);
    }

    @Override
    public void setHoldability(int i) throws SQLException {
        connection.setHoldability(i);
    }

    @Override
    public int getHoldability() throws SQLException {
        return connection.getHoldability();
    }

    @Override
    public Savepoint setSavepoint() throws SQLException {
        return connection.setSavepoint();
    }

    @Override
    public Savepoint setSavepoint(String s) throws SQLException {
        return connection.setSavepoint(s);
    }

    @Override
    public void rollback(Savepoint savepoint) throws SQLException {
        connection.rollback(savepoint);
    }

    @Override
    public void releaseSavepoint(Savepoint savepoint) throws SQLException {
        connection.releaseSavepoint(savepoint);
    }

    @Override
    public Statement createStatement(int i, int i2, int i3) throws SQLException {
        return new StatementDecorator(connection.createStatement(i, i2, i3), tokenProvider);
    }

    @Override
    public PreparedStatement prepareStatement(String s, int i, int i2, int i3) throws SQLException {
        return new PreparedStatementDecorator(connection.prepareStatement(s, i, i2, i3), tokenProvider);
    }

    @Override
    public CallableStatement prepareCall(String s, int i, int i2, int i3) throws SQLException {
        return new CallableStatementDecorator(connection.prepareCall(s, i, i2, i3), tokenProvider);
    }

    @Override
    public PreparedStatement prepareStatement(String s, int i) throws SQLException {
        return new PreparedStatementDecorator(connection.prepareStatement(s, i), tokenProvider);
    }

    @Override
    public PreparedStatement prepareStatement(String s, int[] ints) throws SQLException {
        return new PreparedStatementDecorator(connection.prepareStatement(s, ints), tokenProvider);
    }

    @Override
    public PreparedStatement prepareStatement(String s, String[] strings) throws SQLException {
        return new PreparedStatementDecorator(connection.prepareStatement(s, strings), tokenProvider);
    }

    @Override
    public Clob createClob() throws SQLException {
        return connection.createClob();
    }

    @Override
    public Blob createBlob() throws SQLException {
        return connection.createBlob();
    }

    @Override
    public NClob createNClob() throws SQLException {
        return connection.createNClob();
    }

    @Override
    public SQLXML createSQLXML() throws SQLException {
        return connection.createSQLXML();
    }

    @Override
    public boolean isValid(int i) throws SQLException {
        return connection.isValid(i);
    }

    @Override
    public void setClientInfo(String s, String s2) throws SQLClientInfoException {
        connection.setClientInfo(s, s2);
    }

    @Override
    public void setClientInfo(Properties properties) throws SQLClientInfoException {
        connection.setClientInfo(properties);
    }

    @Override
    public String getClientInfo(String s) throws SQLException {
        return connection.getClientInfo(s);
    }

    @Override
    public Properties getClientInfo() throws SQLException {
        return connection.getClientInfo();
    }

    @Override
    public Array createArrayOf(String s, Object[] objects) throws SQLException {
        return connection.createArrayOf(s, objects);
    }

    @Override
    public Struct createStruct(String s, Object[] objects) throws SQLException {
        return connection.createStruct(s, objects);
    }

    @Override
    public void setSchema(String s) throws SQLException {
        connection.setSchema(s);
    }

    @Override
    public String getSchema() throws SQLException {
        return connection.getSchema();
    }

    @Override
    public void abort(Executor executor) throws SQLException {
        connection.abort(executor);
    }

    @Override
    public void setNetworkTimeout(Executor executor, int i) throws SQLException {
        connection.setNetworkTimeout(executor, i);
    }

    @Override
    public int getNetworkTimeout() throws SQLException {
        return connection.getNetworkTimeout();
    }

    @Override
    public <T> T unwrap(Class<T> tClass) throws SQLException {
        return connection.unwrap(tClass);
    }

    @Override
    public boolean isWrapperFor(Class<?> aClass) throws SQLException {
        return connection.isWrapperFor(aClass);
    }
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.vulkan;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* See {@link VkDeviceQueueGlobalPriorityCreateInfoKHR}.
*
* <h3>Layout</h3>
*
* <pre><code>
* struct VkDeviceQueueGlobalPriorityCreateInfoEXT {
* VkStructureType sType;
* void const * pNext;
* VkQueueGlobalPriorityKHR globalPriority;
* }</code></pre>
*/
public class VkDeviceQueueGlobalPriorityCreateInfoEXT extends VkDeviceQueueGlobalPriorityCreateInfoKHR {
/**
* Creates a {@code VkDeviceQueueGlobalPriorityCreateInfoEXT} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
* visible to the struct instance and vice versa.
*
* <p>The created instance holds a strong reference to the container object.</p>
*/
public VkDeviceQueueGlobalPriorityCreateInfoEXT(ByteBuffer container) {
super(container);
}
/** Sets the specified value to the {@code sType} field. */
@Override
public VkDeviceQueueGlobalPriorityCreateInfoEXT sType(@NativeType("VkStructureType") int value) { nsType(address(), value); return this; }
/** Sets the {@link KHRGlobalPriority#VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR} value to the {@code sType} field. */
@Override
public VkDeviceQueueGlobalPriorityCreateInfoEXT sType$Default() { return sType(KHRGlobalPriority.VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR); }
/** Sets the specified value to the {@code pNext} field. */
@Override
public VkDeviceQueueGlobalPriorityCreateInfoEXT pNext(@NativeType("void const *") long value) { npNext(address(), value); return this; }
/** Sets the specified value to the {@code globalPriority} field. */
@Override
public VkDeviceQueueGlobalPriorityCreateInfoEXT globalPriority(@NativeType("VkQueueGlobalPriorityKHR") int value) { nglobalPriority(address(), value); return this; }
/** Initializes this struct with the specified values. */
@Override
public VkDeviceQueueGlobalPriorityCreateInfoEXT set(
int sType,
long pNext,
int globalPriority
) {
sType(sType);
pNext(pNext);
globalPriority(globalPriority);
return this;
}
/**
* Copies the specified struct data to this struct.
*
* @param src the source struct
*
* @return this struct
*/
public VkDeviceQueueGlobalPriorityCreateInfoEXT set(VkDeviceQueueGlobalPriorityCreateInfoEXT src) {
memCopy(src.address(), address(), SIZEOF);
return this;
}
// -----------------------------------
/** Returns a new {@code VkDeviceQueueGlobalPriorityCreateInfoEXT} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
public static VkDeviceQueueGlobalPriorityCreateInfoEXT malloc() {
return wrap(VkDeviceQueueGlobalPriorityCreateInfoEXT.class, nmemAllocChecked(SIZEOF));
}
/** Returns a new {@code VkDeviceQueueGlobalPriorityCreateInfoEXT} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
public static VkDeviceQueueGlobalPriorityCreateInfoEXT calloc() {
return wrap(VkDeviceQueueGlobalPriorityCreateInfoEXT.class, nmemCallocChecked(1, SIZEOF));
}
/** Returns a new {@code VkDeviceQueueGlobalPriorityCreateInfoEXT} instance allocated with {@link BufferUtils}. */
public static VkDeviceQueueGlobalPriorityCreateInfoEXT create() {
ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
return wrap(VkDeviceQueueGlobalPriorityCreateInfoEXT.class, memAddress(container), container);
}
/** Returns a new {@code VkDeviceQueueGlobalPriorityCreateInfoEXT} instance for the specified memory address. */
public static VkDeviceQueueGlobalPriorityCreateInfoEXT create(long address) {
return wrap(VkDeviceQueueGlobalPriorityCreateInfoEXT.class, address);
}
/** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
@Nullable
public static VkDeviceQueueGlobalPriorityCreateInfoEXT createSafe(long address) {
return address == NULL ? null : wrap(VkDeviceQueueGlobalPriorityCreateInfoEXT.class, address);
}
/**
* Returns a new {@link VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
*
* @param capacity the buffer capacity
*/
public static VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer malloc(int capacity) {
return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
}
/**
* Returns a new {@link VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
*
* @param capacity the buffer capacity
*/
public static VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer calloc(int capacity) {
return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
}
/**
* Returns a new {@link VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer} instance allocated with {@link BufferUtils}.
*
* @param capacity the buffer capacity
*/
public static VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer create(int capacity) {
ByteBuffer container = __create(capacity, SIZEOF);
return wrap(Buffer.class, memAddress(container), capacity, container);
}
/**
* Create a {@link VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer} instance at the specified memory.
*
* @param address the memory address
* @param capacity the buffer capacity
*/
public static VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer create(long address, int capacity) {
return wrap(Buffer.class, address, capacity);
}
/** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
@Nullable
public static VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer createSafe(long address, int capacity) {
return address == NULL ? null : wrap(Buffer.class, address, capacity);
}
// -----------------------------------
/** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
@Deprecated public static VkDeviceQueueGlobalPriorityCreateInfoEXT mallocStack() { return malloc(stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
@Deprecated public static VkDeviceQueueGlobalPriorityCreateInfoEXT callocStack() { return calloc(stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
@Deprecated public static VkDeviceQueueGlobalPriorityCreateInfoEXT mallocStack(MemoryStack stack) { return malloc(stack); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
@Deprecated public static VkDeviceQueueGlobalPriorityCreateInfoEXT callocStack(MemoryStack stack) { return calloc(stack); }
/** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
@Deprecated public static VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer mallocStack(int capacity) { return malloc(capacity, stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
@Deprecated public static VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer callocStack(int capacity) { return calloc(capacity, stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
@Deprecated public static VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer mallocStack(int capacity, MemoryStack stack) { return malloc(capacity, stack); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
@Deprecated public static VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer callocStack(int capacity, MemoryStack stack) { return calloc(capacity, stack); }
/**
* Returns a new {@code VkDeviceQueueGlobalPriorityCreateInfoEXT} instance allocated on the specified {@link MemoryStack}.
*
* @param stack the stack from which to allocate
*/
public static VkDeviceQueueGlobalPriorityCreateInfoEXT malloc(MemoryStack stack) {
return wrap(VkDeviceQueueGlobalPriorityCreateInfoEXT.class, stack.nmalloc(ALIGNOF, SIZEOF));
}
/**
* Returns a new {@code VkDeviceQueueGlobalPriorityCreateInfoEXT} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
*
* @param stack the stack from which to allocate
*/
public static VkDeviceQueueGlobalPriorityCreateInfoEXT calloc(MemoryStack stack) {
return wrap(VkDeviceQueueGlobalPriorityCreateInfoEXT.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
}
/**
* Returns a new {@link VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer} instance allocated on the specified {@link MemoryStack}.
*
* @param stack the stack from which to allocate
* @param capacity the buffer capacity
*/
public static VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer malloc(int capacity, MemoryStack stack) {
return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
}
/**
* Returns a new {@link VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
*
* @param stack the stack from which to allocate
* @param capacity the buffer capacity
*/
public static VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer calloc(int capacity, MemoryStack stack) {
return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
}
// -----------------------------------
/** An array of {@link VkDeviceQueueGlobalPriorityCreateInfoEXT} structs. */
public static class Buffer extends VkDeviceQueueGlobalPriorityCreateInfoKHR.Buffer {
private static final VkDeviceQueueGlobalPriorityCreateInfoEXT ELEMENT_FACTORY = VkDeviceQueueGlobalPriorityCreateInfoEXT.create(-1L);
/**
* Creates a new {@code VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer} instance backed by the specified container.
*
* Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
* will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
* by {@link VkDeviceQueueGlobalPriorityCreateInfoEXT#SIZEOF}, and its mark will be undefined.
*
* <p>The created buffer instance holds a strong reference to the container object.</p>
*/
public Buffer(ByteBuffer container) {
super(container);
}
public Buffer(long address, int cap) {
super(address, null, -1, 0, cap, cap);
}
Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
super(address, container, mark, pos, lim, cap);
}
@Override
protected Buffer self() {
return this;
}
@Override
protected VkDeviceQueueGlobalPriorityCreateInfoEXT getElementFactory() {
return ELEMENT_FACTORY;
}
/** Sets the specified value to the {@code sType} field. */
@Override
public VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer sType(@NativeType("VkStructureType") int value) { VkDeviceQueueGlobalPriorityCreateInfoEXT.nsType(address(), value); return this; }
/** Sets the {@link KHRGlobalPriority#VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR} value to the {@code sType} field. */
@Override
public VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer sType$Default() { return sType(KHRGlobalPriority.VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR); }
/** Sets the specified value to the {@code pNext} field. */
@Override
public VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer pNext(@NativeType("void const *") long value) { VkDeviceQueueGlobalPriorityCreateInfoEXT.npNext(address(), value); return this; }
/** Sets the specified value to the {@code globalPriority} field. */
@Override
public VkDeviceQueueGlobalPriorityCreateInfoEXT.Buffer globalPriority(@NativeType("VkQueueGlobalPriorityKHR") int value) { VkDeviceQueueGlobalPriorityCreateInfoEXT.nglobalPriority(address(), value); return this; }
}
}
// NOTE(review): removed trailing non-Java text ("Subsets and Splits", dataset-viewer
// boilerplate) that was appended after the closing brace by an extraction artifact;
// it is not part of this source file and would not compile.