gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.hasItem;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeThat;
import com.facebook.buck.jvm.java.testutil.AbiCompilationModeTest;
import com.facebook.buck.jvm.java.testutil.Bootclasspath;
import com.facebook.buck.testutil.ProcessResult;
import com.facebook.buck.testutil.TemporaryPaths;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.facebook.buck.testutil.integration.ZipInspector;
import com.facebook.buck.util.ExitCode;
import com.facebook.buck.util.ProcessExecutor;
import com.facebook.buck.util.environment.Platform;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.jar.JarFile;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
/**
 * Integration tests for Buck {@code java_binary} rules: building and running fat jars
 * (native libraries, captured output, VM args, alternate java binary, exit codes),
 * cache behavior, provided/exported-provided dependency exclusion, class blacklisting,
 * corrupt-jar error reporting, and bootclasspath resolution.
 *
 * <p>All tests are skipped on Windows (see {@link #checkPlatform()}).
 */
public class JavaBinaryIntegrationTest extends AbiCompilationModeTest {
@Rule public TemporaryPaths tmp = new TemporaryPaths();
// Per-test workspace; initialized by setUpProjectWorkspaceForScenario(...).
private ProjectWorkspace workspace;
@Before
public void checkPlatform() {
// These scenarios depend on non-Windows tooling; skip the whole class on Windows.
assumeThat(Platform.detect(), not(Platform.WINDOWS));
}
/** Running a fat jar that bundles native libraries should succeed. */
@Test
public void fatJarLoadingNativeLibraries() throws IOException {
setUpProjectWorkspaceForScenario("fat_jar");
workspace.runBuckCommand("run", "//:bin-fat").assertSuccess();
}
/** After a clean, the fat-jar output must be fetched from the dir cache and exist on disk. */
@Test
public void fatJarOutputIsRecorded() throws IOException, InterruptedException {
setUpProjectWorkspaceForScenario("fat_jar");
workspace.enableDirCache();
workspace.runBuckCommand("build", "//:bin-fat").assertSuccess();
workspace.runBuckCommand("clean", "--keep-cache");
Path path = workspace.buildAndReturnOutput("//:bin-fat");
workspace.getBuildLog().assertTargetWasFetchedFromCache("//:bin-fat");
assertTrue(workspace.asCell().getFilesystem().exists(path));
}
/** stdout and stderr of the wrapped binary are forwarded when the built jar is run. */
@Test
public void fatJarWithOutput() throws IOException, InterruptedException {
setUpProjectWorkspaceForScenario("fat_jar");
Path jar = workspace.buildAndReturnOutput("//:bin-output");
ProcessExecutor.Result result = workspace.runJar(jar);
assertEquals("output", result.getStdout().get().trim());
assertEquals("error", result.getStderr().get().trim());
}
/** With java.cache_binaries=false, a rebuild after clean must build locally, not fetch. */
@Test
public void disableCachingForBinaries() throws IOException {
setUpProjectWorkspaceForScenario("java_binary_with_blacklist");
workspace.enableDirCache();
workspace
.runBuckBuild("-c", "java.cache_binaries=false", "//:bin-no-blacklist")
.assertSuccess();
workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
workspace
.runBuckBuild("-c", "java.cache_binaries=false", "//:bin-no-blacklist")
.assertSuccess();
workspace.getBuildLog().assertTargetBuiltLocally("//:bin-no-blacklist");
}
/**
 * provided_deps are excluded from the binary jar, while regular deps, exported deps,
 * deps' provided deps, and exported provided deps remain included.
 */
@Test
public void javaBinaryWithProvidedDeps() throws IOException {
setUpProjectWorkspaceForScenario("java_binary_with_provided_deps");
Path binaryJar = workspace.buildAndReturnOutput("//:bin");
ZipInspector inspector = new ZipInspector(binaryJar);
inspector.assertFileExists("com/example/buck/Lib.class");
inspector.assertFileExists("com/example/buck/Dep.class");
inspector.assertFileExists("com/example/buck/ExportedDep.class");
inspector.assertFileExists("com/example/buck/DepProvidedDep.class");
inspector.assertFileDoesNotExist("com/example/buck/ProvidedDep.class");
inspector.assertFileExists("com/example/buck/ExportedProvidedDep.class");
}
/** A failing wrapped binary propagates its exit status to `buck run`. */
@Test
public void fatJarWithExitCode() throws IOException {
setUpProjectWorkspaceForScenario("fat_jar");
workspace
.runBuckCommand("run", "//:bin-exit-code")
.assertSpecialExitCode("error", ExitCode.BUILD_ERROR);
}
/** VM arguments passed to runJar reach the inner JVM (the binary echoes them back). */
@Test
public void fatJarWithVmArguments() throws IOException, InterruptedException {
setUpProjectWorkspaceForScenario("fat_jar");
ImmutableList<String> args = ImmutableList.of("-ea", "-Dfoo.bar.baz=1234", "-Xms64m");
String expected = Joiner.on("\n").join(args);
Path jar = workspace.buildAndReturnOutput("//:bin-jvm-args");
ProcessExecutor.Result result = workspace.runJar(jar, args);
assertEquals(expected, result.getStdout().get().trim());
}
/** -Dbuck.fatjar.java.home points the fat jar's launcher at an alternate java wrapper. */
@Test
public void fatJarWithAlternateJavaBin() throws IOException, InterruptedException {
setUpProjectWorkspaceForScenario("fat_jar");
Path jar = workspace.buildAndReturnOutput("//:bin-alternate-java");
String javaHomeArg = "-Dbuck.fatjar.java.home=" + tmp.getRoot();
ProcessExecutor.Result result = workspace.runJar(jar, ImmutableList.of(javaHomeArg));
assertEquals("Running java wrapper\nRunning inner jar", result.getStdout().get().trim());
}
/** META-INF resources declared on the rule end up inside the built jar. */
@Test
public void jarWithMetaInfo() throws IOException {
setUpProjectWorkspaceForScenario("java_binary_with_meta_inf");
Path jar = workspace.buildAndReturnOutput("//:bin-meta-inf");
try (JarFile jarFile = new JarFile(jar.toFile())) {
assertNotNull(jarFile.getEntry("META-INF/test.txt"));
}
}
/**
 * Blacklisted classes (including inner classes and same-named resources) are stripped
 * from the jar; the un-blacklisted variant retains all entries.
 */
@Test
public void fatJarWithBlacklist() throws IOException {
setUpProjectWorkspaceForScenario("java_binary_with_blacklist");
Path binaryJarWithBlacklist = workspace.buildAndReturnOutput("//:bin-blacklist");
Path binaryJarWithoutBlacklist = workspace.buildAndReturnOutput("//:bin-no-blacklist");
ImmutableSet<String> commonEntries =
ImmutableSet.of(
"META-INF/", "META-INF/MANIFEST.MF", "com/", "com/example/", "com/example/B.class");
ImmutableSet<String> blacklistedEntries =
ImmutableSet.of(
"com/example/A.class",
"com/example/A$C.class",
"com/example/Alligator.class",
"com/example/A.txt");
assertEquals(
"com.example.Alligator, com.example.A and any inner classes should be removed.",
commonEntries,
new ZipInspector(binaryJarWithBlacklist).getZipFileEntries());
assertEquals(
ImmutableSet.builder().addAll(commonEntries).addAll(blacklistedEntries).build(),
new ZipInspector(binaryJarWithoutBlacklist).getZipFileEntries());
}
/**
 * A corrupted dependency jar must produce a failure message naming the failed rule,
 * the corrupted jar, and the underlying ZipError.
 */
@Test
public void testJarWithCorruptInput() throws IOException {
setUpProjectWorkspaceForScenario("corruption");
workspace.runBuckBuild("//:simple-lib").assertSuccess();
// Extract the jar path from `buck targets --show_output` ("<target> <path>").
String libJar =
workspace
.runBuckCommand("targets", "--show_output", "//:simple-lib")
.assertSuccess()
.getStdout()
.split(" ")[1]
.trim();
// Now corrupt the output jar.
// Overwrite bytes near the end of the file so later reads fail (a ZipError is
// asserted below; presumably this clobbers the zip end-of-central-directory area).
Path jarPath = workspace.getPath(libJar);
byte[] bytes = Files.readAllBytes(jarPath);
for (int backOffset = 7; backOffset <= 10; backOffset++) {
bytes[bytes.length - backOffset] = 0x77;
}
Files.write(jarPath, bytes);
ProcessResult result = workspace.runBuckBuild("//:wrapper_01").assertFailure();
// Should show the rule that failed.
assertThat(result.getStderr(), containsString("//:simple-lib"));
// Should show the jar we were operating on.
assertThat(result.getStderr(), containsString(libJar));
// Should show the original exception.
assertThat(result.getStderr(), containsString("ZipError"));
}
/**
 * A relative entry in java.bootclasspath-8 ("clowntown.jar") must be resolved against
 * the workspace root before being handed to javac.
 */
@Test
public void testBootclasspathPathResolution() throws IOException {
String systemBootclasspath = Bootclasspath.getSystemBootclasspath();
setUpProjectWorkspaceForScenario("fat_jar");
ProcessResult result =
workspace.runBuckBuild(
"//:bin-output",
"--config",
"java.source_level=8",
"--config",
"java.target_level=8",
"--config",
String.format("java.bootclasspath-8=clowntown.jar:%s", systemBootclasspath),
"-v",
"5");
result.assertSuccess();
List<String> verboseLogs =
Splitter.on('\n').trimResults().omitEmptyStrings().splitToList(result.getStderr());
// Check the javac invocations for a properly resolved bootclasspath and that we aren't
// accidentally mixing bootclasspaths
assertThat(
verboseLogs,
hasItem(
allOf(
containsString("javac"),
containsString("-bootclasspath"),
containsString(workspace.getPath("clowntown.jar").toString()))));
}
/** Exported provided deps of a library must not be packaged into the binary jar. */
@Test
public void testExportedProvidedDepsExcludedFromBinary() throws IOException {
setUpProjectWorkspaceForScenario("exported_provided_deps");
Path jar = workspace.buildAndReturnOutput("//:binary_without_exported_provided_dep");
try (JarFile jarFile = new JarFile(jar.toFile())) {
assertNull(jarFile.getEntry("com/test/ExportedProvidedLibraryClass.class"));
}
}
/**
 * Creates and initializes the test workspace for the given scenario directory and
 * applies the class's ABI compilation mode to it.
 *
 * @param scenario name of the test-data scenario to load
 * @return the initialized workspace (also stored in {@link #workspace})
 * @throws IOException if the workspace cannot be created
 */
private ProjectWorkspace setUpProjectWorkspaceForScenario(String scenario) throws IOException {
workspace = TestDataHelper.createProjectWorkspaceForScenario(this, scenario, tmp);
workspace.setUp();
setWorkspaceCompilationMode(workspace);
return workspace;
}
}
| |
/*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.partitionservice;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.client.impl.clientside.HazelcastClientInstanceImpl;
import com.hazelcast.client.test.TestHazelcastFactory;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.internal.partition.impl.InternalPartitionServiceImpl;
import com.hazelcast.nio.Address;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.PortableReader;
import com.hazelcast.nio.serialization.PortableWriter;
import com.hazelcast.partition.PartitionLostEvent;
import com.hazelcast.partition.PartitionLostListener;
import com.hazelcast.partition.PartitionLostListenerStressTest.EventCollectingPartitionLostListener;
import com.hazelcast.spi.EventRegistration;
import com.hazelcast.spi.impl.PortablePartitionLostEvent;
import com.hazelcast.spi.impl.eventservice.InternalEventService;
import com.hazelcast.spi.partition.IPartitionLostEvent;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.After;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import static com.hazelcast.client.impl.clientside.ClientTestUtil.getHazelcastClientInstanceImpl;
import static com.hazelcast.internal.partition.InternalPartitionService.PARTITION_LOST_EVENT_TOPIC;
import static com.hazelcast.internal.partition.InternalPartitionService.SERVICE_NAME;
import static com.hazelcast.test.HazelcastTestSupport.assertTrueEventually;
import static com.hazelcast.test.HazelcastTestSupport.getAddress;
import static com.hazelcast.test.HazelcastTestSupport.getNode;
import static com.hazelcast.test.HazelcastTestSupport.warmUpPartitions;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Tests client-side partition-lost listeners: registration/removal bookkeeping on the
 * member, event delivery to the client (same node and other node), and Portable
 * serialization of {@link PortablePartitionLostEvent}.
 */
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelTest.class})
public class ClientPartitionLostListenerTest {
private final TestHazelcastFactory hazelcastFactory = new TestHazelcastFactory();
@After
public void tearDown() {
// Tear down all members and clients created by the factory between tests.
hazelcastFactory.terminateAll();
}
@Test
public void test_partitionLostListener_registered() {
final HazelcastInstance instance = hazelcastFactory.newHazelcastInstance();
final HazelcastInstance client = hazelcastFactory.newHazelcastClient();
client.getPartitionService().addPartitionLostListener(mock(PartitionLostListener.class));
// Expected = 4 -> 1 added & 1 from {@link com.hazelcast.scheduledexecutor.impl.DistributedScheduledExecutorService}
// + 2 from map and cache ExpirationManagers
assertRegistrationsSizeEventually(instance, 4);
}
@Test
public void test_partitionLostListener_removed() {
final HazelcastInstance instance = hazelcastFactory.newHazelcastInstance();
final HazelcastInstance client = hazelcastFactory.newHazelcastClient();
final String registrationId = client.getPartitionService().addPartitionLostListener(mock(PartitionLostListener.class));
// Expected = 4 -> 1 added & 1 from {@link com.hazelcast.scheduledexecutor.impl.DistributedScheduledExecutorService}
// + 2 from map and cache ExpirationManagers
assertRegistrationsSizeEventually(instance, 4);
client.getPartitionService().removePartitionLostListener(registrationId);
// Expected = 3 -> see {@link com.hazelcast.scheduledexecutor.impl.DistributedScheduledExecutorService}
// + 2 from map and cache ExpirationManagers
assertRegistrationsSizeEventually(instance, 3);
}
@Test
public void test_partitionLostListener_invoked() {
final HazelcastInstance instance = hazelcastFactory.newHazelcastInstance();
final HazelcastInstance client = hazelcastFactory.newHazelcastClient();
warmUpPartitions(instance, client);
final EventCollectingPartitionLostListener listener = new EventCollectingPartitionLostListener();
client.getPartitionService().addPartitionLostListener(listener);
// Expected = 4 -> 1 added & 1 from {@link com.hazelcast.scheduledexecutor.impl.DistributedScheduledExecutorService}
// + 2 from map and cache ExpirationManagers
assertRegistrationsSizeEventually(instance, 4);
// Simulate a lost partition directly on the member's partition service.
final InternalPartitionServiceImpl partitionService = getNode(instance).getNodeEngine().getService(SERVICE_NAME);
final int partitionId = 5;
partitionService.onPartitionLost(new IPartitionLostEvent(partitionId, 0, null));
assertPartitionLostEventEventually(listener, partitionId);
}
@Test
public void test_partitionLostListener_invoked_fromOtherNode() {
final HazelcastInstance instance1 = hazelcastFactory.newHazelcastInstance();
final HazelcastInstance instance2 = hazelcastFactory.newHazelcastInstance();
final ClientConfig clientConfig = new ClientConfig();
// Non-smart routing: the client talks only to its owner connection, so the event
// must be relayed from the other member.
clientConfig.getNetworkConfig().setSmartRouting(false);
final HazelcastInstance client = hazelcastFactory.newHazelcastClient(clientConfig);
warmUpPartitions(instance1, instance2, client);
final HazelcastClientInstanceImpl clientInstanceImpl = getHazelcastClientInstanceImpl(client);
final Address clientOwnerAddress = clientInstanceImpl.getConnectionManager().getOwnerConnectionAddress();
// Pick the member the client is NOT connected to, to trigger the event remotely.
final HazelcastInstance other = getAddress(instance1).equals(clientOwnerAddress) ? instance2 : instance1;
final EventCollectingPartitionLostListener listener = new EventCollectingPartitionLostListener();
client.getPartitionService().addPartitionLostListener(listener);
// Expected = 7 -> 1 added + (1 from {@link com.hazelcast.scheduledexecutor.impl.DistributedScheduledExecutorService}
// + 2 from map and cache ExpirationManagers) * 2 instances
assertRegistrationsSizeEventually(instance1, 7);
assertRegistrationsSizeEventually(instance2, 7);
final InternalPartitionServiceImpl partitionService = getNode(other).getNodeEngine().getService(SERVICE_NAME);
final int partitionId = 5;
partitionService.onPartitionLost(new IPartitionLostEvent(partitionId, 0, null));
assertPartitionLostEventEventually(listener, partitionId);
}
/** Asserts (eventually) that the member has exactly {@code size} partition-lost registrations. */
private void assertRegistrationsSizeEventually(final HazelcastInstance instance, final int size) {
assertTrueEventually(new AssertTask() {
@Override
public void run()
throws Exception {
final InternalEventService eventService = getNode(instance).getNodeEngine().getEventService();
final Collection<EventRegistration> registrations = eventService
.getRegistrations(SERVICE_NAME, PARTITION_LOST_EVENT_TOPIC);
assertEquals(size, registrations.size());
}
});
}
/** Asserts (eventually) that the listener received an event for {@code partitionId}. */
private void assertPartitionLostEventEventually(final EventCollectingPartitionLostListener listener, final int partitionId) {
assertTrueEventually(new AssertTask() {
@Override
public void run()
throws Exception {
final List<PartitionLostEvent> events = listener.getEvents();
assertFalse(events.isEmpty());
assertEquals(partitionId, events.get(0).getPartitionId());
}
});
}
@Test
public void test_portableMapPartitionLostEvent_serialization()
throws IOException {
// Verify the Portable write path: partition id -> "p", lost backup count -> "l",
// source address -> raw data output.
final Address source = new Address();
final PortablePartitionLostEvent event = new PortablePartitionLostEvent(1, 2, source);
final PortableWriter writer = mock(PortableWriter.class);
final ObjectDataOutput output = mock(ObjectDataOutput.class);
when(writer.getRawDataOutput()).thenReturn(output);
event.writePortable(writer);
verify(writer).writeInt("p", 1);
verify(writer).writeInt("l", 2);
verify(output).writeObject(source);
}
@Test
public void test_portableMapPartitionLostEvent_deserialization()
throws IOException {
// Verify the Portable read path mirrors the write path above.
final Address source = new Address();
final PortablePartitionLostEvent event = new PortablePartitionLostEvent();
final PortableReader reader = mock(PortableReader.class);
final ObjectDataInput input = mock(ObjectDataInput.class);
when(reader.getRawDataInput()).thenReturn(input);
when(reader.readInt("p")).thenReturn(1);
when(reader.readInt("l")).thenReturn(2);
when(input.readObject()).thenReturn(source);
event.readPortable(reader);
assertEquals(1, event.getPartitionId());
assertEquals(2, event.getLostBackupCount());
assertEquals(source, event.getSource());
}
}
| |
package com.jasonrobinson.racer.ui.race;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.view.MenuItemCompat;
import android.support.v4.view.ViewPager;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import com.astuetz.PagerSlidingTabStrip;
import com.jasonrobinson.racer.R;
import com.jasonrobinson.racer.adapter.RaceGridPagerAdapter;
import com.jasonrobinson.racer.adapter.RaceListPagerAdapter;
import com.jasonrobinson.racer.async.RaceAsyncTask;
import com.jasonrobinson.racer.enumeration.RaceOptions;
import com.jasonrobinson.racer.model.Race;
import com.jasonrobinson.racer.model.RaceMode;
import com.jasonrobinson.racer.ui.base.BaseActivity;
import com.jasonrobinson.racer.ui.ladder.LadderActivity;
import com.jasonrobinson.racer.ui.race.RaceListFragment.RacesCallback;
import com.jasonrobinson.racer.ui.web.WebActivity;
import com.jasonrobinson.racer.util.DepthPageTransformer;
import com.metova.slim.annotation.Layout;
import java.util.ArrayList;
import java.util.List;
import butterknife.InjectView;
/**
 * Main races screen: hosts a ViewPager of race lists (upcoming/finished) with a
 * sliding tab strip, supports switching between list and calendar (grid) modes,
 * and refreshes race data in the background at most once per 24 hours.
 */
@Layout(R.layout.races_activity)
public class RacesActivity extends BaseActivity implements RacesCallback {
private static final long FETCH_INTERVAL = 1000 * 60 * 60 * 24; // 24 hours
@InjectView(R.id.tabs)
PagerSlidingTabStrip mTabs;
@InjectView(R.id.pager)
ViewPager mPager;
RaceListPagerAdapter mListAdapter;
// NOTE(review): mGridAdapter is never assigned anywhere in this class, so
// showGrid() sets a null adapter on the pager. Confirm whether grid mode is
// reachable (races_menu inflation is commented out below) or initialize this.
RaceGridPagerAdapter mGridAdapter;
RacesTask mRacesTask;
// True while a background fetch is running; drives the refresh action view.
boolean mRefreshing;
RaceMode mRaceMode;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Toolbar toolbar = new Toolbar(this);
// toolbar.setTitle(R.string.races);
//
// setSupportActionBar(toolbar);
// Two list pages: upcoming (unfinished) and finished races.
List<RaceListPagerAdapter.RaceListParams> params = new ArrayList<RaceListPagerAdapter.RaceListParams>();
params.add(new RaceListPagerAdapter.RaceListParams(RaceOptions.UNFINISHED, getString(R.string.upcoming)));
params.add(new RaceListPagerAdapter.RaceListParams(RaceOptions.FINISHED, getString(R.string.finished)));
mListAdapter = new RaceListPagerAdapter(getSupportFragmentManager(), params);
// Restore the last-used display mode from settings.
mRaceMode = getSettingsManager().getRaceMode();
if (mRaceMode == RaceMode.LIST) {
showList();
} else if (mRaceMode == RaceMode.CALENDAR) {
showGrid();
}
mPager.setPageTransformer(true, new DepthPageTransformer());
mTabs.setViewPager(mPager);
// Fetch fresh race data only if the last fetch is older than FETCH_INTERVAL.
long lastFetch = getSettingsManager().getLastRaceFetch();
long now = System.currentTimeMillis();
if (now - lastFetch >= FETCH_INTERVAL) {
fetchRaces();
}
}
@Override
protected void onDestroy() {
super.onDestroy();
// Don't leak the background task past the activity's lifetime.
if (mRacesTask != null) {
mRacesTask.cancel(true);
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
super.onCreateOptionsMenu(menu);
getMenuInflater().inflate(R.menu.refresh_menu, menu);
// TODO: Implement calendar view
// getMenuInflater().inflate(R.menu.races_menu, menu);
// Clear the spinner action view when no refresh is in flight.
if (!mRefreshing) {
MenuItem refreshItem = menu.findItem(R.id.menu_refresh);
MenuItemCompat.setActionView(refreshItem, null);
}
// Hide the menu entry for the mode that is already active.
if (mRaceMode == RaceMode.LIST) {
menu.removeItem(R.id.menu_list);
} else if (mRaceMode == RaceMode.CALENDAR) {
menu.removeItem(R.id.menu_grid);
}
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.menu_refresh:
fetchRaces();
break;
case R.id.menu_list:
showList();
break;
case R.id.menu_grid:
showGrid();
break;
default:
return super.onOptionsItemSelected(item);
}
return true;
}
@Override
public void showUrl(String url) {
// Open the given URL in the in-app WebActivity.
Intent intent = new Intent(this, WebActivity.class);
intent.putExtra(WebActivity.EXTRA_URL, url);
startActivity(intent);
}
@Override
public void showLadder(Race race) {
// Open the ladder screen for the selected race.
Intent intent = new Intent(this, LadderActivity.class);
intent.putExtra(LadderActivity.EXTRA_ID, race.getRaceId());
startActivity(intent);
}
private void showList() {
// NOTE(review): this assignment is redundant — setRaceMode(RaceMode.LIST)
// below sets mRaceMode again.
mRaceMode = RaceMode.LIST;
mPager.setAdapter(mListAdapter);
mTabs.setVisibility(View.VISIBLE);
setRaceMode(RaceMode.LIST);
}
private void showGrid() {
// NOTE(review): mGridAdapter is null here (never initialized) — see the
// field declaration above; confirm intended behavior before enabling grid mode.
mPager.setAdapter(mGridAdapter);
mTabs.setVisibility(View.GONE);
setRaceMode(RaceMode.CALENDAR);
}
/** Persists the display mode and refreshes the options menu to match. */
private void setRaceMode(RaceMode mode) {
mRaceMode = mode;
getSettingsManager().setRaceMode(mRaceMode);
supportInvalidateOptionsMenu();
}
/** Cancels any in-flight fetch and starts a new background race fetch. */
private void fetchRaces() {
if (mRacesTask != null) {
mRacesTask.cancel(true);
}
mRacesTask = new RacesTask();
mRacesTask.execute();
}
/** Updates the refreshing flag and redraws the refresh menu item. */
private void setRefreshing(boolean refreshing) {
mRefreshing = refreshing;
supportInvalidateOptionsMenu();
}
/** Background fetch of races; toggles the refresh UI and reloads visible list fragments. */
private class RacesTask extends RaceAsyncTask {
public RacesTask() {
super(false);
}
@Override
protected void onPreExecute() {
super.onPreExecute();
setRefreshing(true);
}
@Override
protected void onPostExecute(List<Race> result) {
super.onPostExecute(result);
setRefreshing(false);
// Record the fetch time so onCreate skips re-fetching within FETCH_INTERVAL.
getSettingsManager().updateLastRaceFetch();
// Refresh only the currently visible race list fragment(s).
List<Fragment> fragments = getSupportFragmentManager().getFragments();
for (Fragment fragment : fragments) {
if (fragment instanceof RaceListFragment && fragment.isVisible()) {
((RaceListFragment) fragment).refresh();
}
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hbase;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.controller.ControllerService;
import org.apache.nifi.hbase.put.PutColumn;
import org.apache.nifi.hbase.put.PutFlowFile;
import org.apache.nifi.hbase.scan.Column;
import org.apache.nifi.hbase.scan.ResultHandler;
import org.apache.nifi.hbase.validate.ConfigFilesValidator;
import org.apache.nifi.processor.util.StandardValidators;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
/**
 * Controller service contract for an HBase client: mutations (put, checkAndPut,
 * delete), scans with optional filtering and time ranges, and byte-conversion
 * helpers for the primitive types HBase stores.
 */
@Tags({"hbase", "client"})
@CapabilityDescription("A controller service for accessing an HBase client.")
public interface HBaseClientService extends ControllerService {
PropertyDescriptor HADOOP_CONF_FILES = new PropertyDescriptor.Builder()
.name("Hadoop Configuration Files")
.description("Comma-separated list of Hadoop Configuration files," +
" such as hbase-site.xml and core-site.xml for kerberos, " +
"including full paths to the files.")
.addValidator(new ConfigFilesValidator())
.build();
PropertyDescriptor ZOOKEEPER_QUORUM = new PropertyDescriptor.Builder()
.name("ZooKeeper Quorum")
.description("Comma-separated list of ZooKeeper hosts for HBase. Required if Hadoop Configuration Files are not provided.")
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
PropertyDescriptor ZOOKEEPER_CLIENT_PORT = new PropertyDescriptor.Builder()
.name("ZooKeeper Client Port")
.description("The port on which ZooKeeper is accepting client connections. Required if Hadoop Configuration Files are not provided.")
.addValidator(StandardValidators.PORT_VALIDATOR)
.build();
PropertyDescriptor ZOOKEEPER_ZNODE_PARENT = new PropertyDescriptor.Builder()
.name("ZooKeeper ZNode Parent")
.description("The ZooKeeper ZNode Parent value for HBase (example: /hbase). Required if Hadoop Configuration Files are not provided.")
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
PropertyDescriptor HBASE_CLIENT_RETRIES = new PropertyDescriptor.Builder()
.name("HBase Client Retries")
.description("The number of times the HBase client will retry connecting. Required if Hadoop Configuration Files are not provided.")
.addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
.defaultValue("1")
.build();
PropertyDescriptor PHOENIX_CLIENT_JAR_LOCATION = new PropertyDescriptor.Builder()
.name("Phoenix Client JAR Location")
.description("The full path to the Phoenix client JAR. Required if Phoenix is installed on top of HBase.")
.addValidator(StandardValidators.FILE_EXISTS_VALIDATOR)
.expressionLanguageSupported(true)
.dynamicallyModifiesClasspath(true)
.build();
/**
 * Puts a batch of mutations to the given table.
 *
 * @param tableName the name of an HBase table
 * @param puts a list of put mutations for the given table
 * @throws IOException thrown when there are communication errors with HBase
 */
void put(String tableName, Collection<PutFlowFile> puts) throws IOException;
/**
 * Puts the given row to HBase with the provided columns.
 *
 * @param tableName the name of an HBase table
 * @param rowId the id of the row to put
 * @param columns the columns of the row to put
 * @throws IOException thrown when there are communication errors with HBase
 */
void put(String tableName, byte[] rowId, Collection<PutColumn> columns) throws IOException;
/**
 * Atomically checks if a row/family/qualifier value matches the expected value. If it does, then the Put is added to HBase.
 *
 * @param tableName the name of an HBase table
 * @param rowId the id of the row to check
 * @param family the family of the row to check
 * @param qualifier the qualifier of the row to check
 * @param value the value of the row to check. If null, the check is for the lack of column (ie: non-existence)
 * @param column the column to put if the check passes
 * @return True if the Put was executed, false otherwise
 * @throws IOException thrown when there are communication errors with HBase
 */
boolean checkAndPut(String tableName, byte[] rowId, byte[] family, byte[] qualifier, byte[] value, PutColumn column) throws IOException;
/**
 * Deletes the given row on HBase. All cells are deleted.
 *
 * @param tableName the name of an HBase table
 * @param rowId the id of the row to delete
 * @throws IOException thrown when there are communication errors with HBase
 */
void delete(String tableName, byte[] rowId) throws IOException;
/**
 * Deletes a list of rows in HBase. All cells are deleted.
 *
 * @param tableName the name of an HBase table
 * @param rowIds a list of rowIds to send in a batch delete
 * @throws IOException thrown when there are communication errors with HBase
 */
void delete(String tableName, List<byte[]> rowIds) throws IOException;
/**
 * Scans the given table using the optional filter criteria and passing each result to the provided handler.
 *
 * @param tableName the name of an HBase table to scan
 * @param columns optional columns to return, if not specified all columns are returned
 * @param filterExpression optional filter expression, if not specified no filtering is performed
 * @param minTime the minimum timestamp of cells to return, passed to the HBase scanner timeRange
 * @param handler a handler to process rows of the result set
 * @throws IOException thrown when there are communication errors with HBase
 */
void scan(String tableName, Collection<Column> columns, String filterExpression, long minTime, ResultHandler handler) throws IOException;
/**
 * Scans the given table for the given rowId and passes the result to the handler.
 *
 * @param tableName the name of an HBase table to scan
 * @param startRow the row identifier to start scanning at
 * @param endRow the row identifier to end scanning at
 * @param columns optional columns to return, if not specified all columns are returned
 * @param handler a handler to process rows of the result
 * @throws IOException thrown when there are communication errors with HBase
 */
void scan(String tableName, byte[] startRow, byte[] endRow, Collection<Column> columns, ResultHandler handler) throws IOException;
/**
 * Scans the given table for the given range of row keys or time range and passes the result to a handler.<br/>
 *
 * @param tableName the name of an HBase table to scan
 * @param startRow the row identifier to start scanning at
 * @param endRow the row identifier to end scanning at
 * @param filterExpression optional filter expression, if not specified no filtering is performed
 * @param timerangeMin the minimum timestamp of cells to return, passed to the HBase scanner timeRange
 * @param timerangeMax the maximum timestamp of cells to return, passed to the HBase scanner timeRange
 * @param limitRows the maximum number of rows to be returned by scanner
 * @param isReversed whether this scan is a reversed one.
 * @param columns optional columns to return, if not specified all columns are returned
 * @param handler a handler to process rows of the result
 * @throws IOException thrown when there are communication errors with HBase
 */
void scan(String tableName, String startRow, String endRow, String filterExpression, Long timerangeMin, Long timerangeMax, Integer limitRows,
Boolean isReversed, Collection<Column> columns, ResultHandler handler) throws IOException;
/**
 * Converts the given boolean to its byte representation.
 *
 * @param b a boolean
 * @return the boolean represented as bytes
 */
byte[] toBytes(boolean b);
/**
 * Converts the given float to its byte representation.
 *
 * @param f a float
 * @return the float represented as bytes
 */
byte[] toBytes(float f);
/**
 * Converts the given int to its byte representation.
 *
 * @param i an int
 * @return the int represented as bytes
 */
byte[] toBytes(int i);
/**
 * Converts the given long to its byte representation.
 *
 * @param l a long
 * @return the long represented as bytes
 */
byte[] toBytes(long l);
/**
 * Converts the given double to its byte representation.
 *
 * @param d a double
 * @return the double represented as bytes
 */
byte[] toBytes(double d);
/**
 * Converts the given string to its byte representation.
 *
 * @param s a string
 * @return the string represented as bytes
 */
byte[] toBytes(String s);
/**
 * Converts the given binary formatted string to a byte representation
 * @param s a binary encoded string
 * @return the string represented as bytes
 */
byte[] toBytesBinary(String s);
/**
 * Create a transit URI from the current configuration and the specified table name.
 * The default implementation just prepends "hbase://" to the table name and row key, i.e. "hbase://tableName/rowKey".
 * @param tableName The name of an HBase table
 * @param rowKey The target HBase row key, this can be null or empty string if the operation is not targeted to a specific row
 * @return a qualified transit URI which can identify an HBase table row in an HBase cluster
 */
default String toTransitUri(String tableName, String rowKey) {
return "hbase://" + tableName + (rowKey != null && !rowKey.isEmpty() ? "/" + rowKey : "");
}
}
| |
//========================================================================
//
//File: GraphicalEditor.java
//
//========================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//========================================================================
//
package org.xtuml.bp.ui.graphics.editor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IConfigurationElement;
import org.eclipse.core.runtime.IExtension;
import org.eclipse.core.runtime.IExtensionPoint;
import org.eclipse.core.runtime.IExtensionRegistry;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Platform;
import org.eclipse.draw2d.FigureCanvas;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.PositionConstants;
import org.eclipse.draw2d.ToolTipHelper;
import org.eclipse.draw2d.geometry.Dimension;
import org.eclipse.draw2d.geometry.Point;
import org.eclipse.draw2d.geometry.Rectangle;
import org.eclipse.draw2d.text.FlowPage;
import org.eclipse.draw2d.text.TextFlow;
import org.eclipse.gef.ContextMenuProvider;
import org.eclipse.gef.DefaultEditDomain;
import org.eclipse.gef.EditDomain;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.GraphicalEditPart;
import org.eclipse.gef.GraphicalViewer;
import org.eclipse.gef.KeyHandler;
import org.eclipse.gef.MouseWheelHandler;
import org.eclipse.gef.MouseWheelZoomHandler;
import org.eclipse.gef.SnapToGrid;
import org.eclipse.gef.Tool;
import org.eclipse.gef.editparts.AbstractGraphicalEditPart;
import org.eclipse.gef.editparts.ZoomListener;
import org.eclipse.gef.editparts.ZoomManager;
import org.eclipse.gef.palette.ConnectionCreationToolEntry;
import org.eclipse.gef.palette.PaletteDrawer;
import org.eclipse.gef.palette.PaletteGroup;
import org.eclipse.gef.palette.PaletteRoot;
import org.eclipse.gef.palette.PanningSelectionToolEntry;
import org.eclipse.gef.palette.ToolEntry;
import org.eclipse.gef.print.PrintGraphicalViewerOperation;
import org.eclipse.gef.tools.AbstractTool;
import org.eclipse.gef.ui.actions.ActionRegistry;
import org.eclipse.gef.ui.actions.AlignmentAction;
import org.eclipse.gef.ui.actions.DirectEditAction;
import org.eclipse.gef.ui.actions.MatchHeightAction;
import org.eclipse.gef.ui.actions.MatchWidthAction;
import org.eclipse.gef.ui.actions.SelectAllAction;
import org.eclipse.gef.ui.actions.ZoomInAction;
import org.eclipse.gef.ui.actions.ZoomOutAction;
import org.eclipse.gef.ui.palette.FlyoutPaletteComposite;
import org.eclipse.gef.ui.palette.FlyoutPaletteComposite.FlyoutPreferences;
import org.eclipse.gef.ui.parts.DomainEventDispatcher;
import org.eclipse.gef.ui.parts.GraphicalEditorWithFlyoutPalette;
import org.eclipse.gef.ui.parts.GraphicalViewerKeyHandler;
import org.eclipse.gef.ui.parts.ScrollingGraphicalViewer;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.dialogs.IDialogSettings;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.preference.JFacePreferences;
import org.eclipse.jface.resource.JFaceResources;
import org.eclipse.jface.util.IPropertyChangeListener;
import org.eclipse.jface.util.PropertyChangeEvent;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ControlEvent;
import org.eclipse.swt.events.ControlListener;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.FocusListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.FontData;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.printing.PrintDialog;
import org.eclipse.swt.printing.Printer;
import org.eclipse.swt.printing.PrinterData;
import org.eclipse.swt.widgets.Canvas;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IEditorReference;
import org.eclipse.ui.IEditorSite;
import org.eclipse.ui.IPartListener;
import org.eclipse.ui.IPartListener2;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.IWorkbenchPart;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.actions.ActionFactory;
import org.eclipse.ui.part.FileEditorInput;
import org.eclipse.ui.views.contentoutline.IContentOutlinePage;
import org.eclipse.ui.views.properties.IPropertySheetPage;
import org.eclipse.ui.views.properties.PropertySheetPage;
import org.osgi.framework.Bundle;
import org.xtuml.bp.core.ActionHome_c;
import org.xtuml.bp.core.Action_c;
import org.xtuml.bp.core.Component_c;
import org.xtuml.bp.core.CorePlugin;
import org.xtuml.bp.core.ModelClass_c;
import org.xtuml.bp.core.MooreActionHome_c;
import org.xtuml.bp.core.Ooaofooa;
import org.xtuml.bp.core.Package_c;
import org.xtuml.bp.core.PackageableElement_c;
import org.xtuml.bp.core.StateMachineState_c;
import org.xtuml.bp.core.common.BridgePointPreferencesStore;
import org.xtuml.bp.core.common.ClassQueryInterface_c;
import org.xtuml.bp.core.common.ModelRoot;
import org.xtuml.bp.core.common.NonRootModelElement;
import org.xtuml.bp.core.common.PersistableModelComponent;
import org.xtuml.bp.core.common.PersistenceManager;
import org.xtuml.bp.core.common.Transaction;
import org.xtuml.bp.core.common.TransactionException;
import org.xtuml.bp.core.common.TransactionManager;
import org.xtuml.bp.core.ui.RenameAction;
import org.xtuml.bp.core.ui.Selection;
import org.xtuml.bp.core.util.EditorUtil;
import org.xtuml.bp.core.util.HierarchyUtil;
import org.xtuml.bp.ui.canvas.CanvasPlugin;
import org.xtuml.bp.ui.canvas.Cl_c;
import org.xtuml.bp.ui.canvas.Connector_c;
import org.xtuml.bp.ui.canvas.Diagram_c;
import org.xtuml.bp.ui.canvas.ElementSpecification_c;
import org.xtuml.bp.ui.canvas.FloatingText_c;
import org.xtuml.bp.ui.canvas.Gr_c;
import org.xtuml.bp.ui.canvas.Graphelement_c;
import org.xtuml.bp.ui.canvas.GraphicalElement_c;
import org.xtuml.bp.ui.canvas.ModelTool_c;
import org.xtuml.bp.ui.canvas.Model_c;
import org.xtuml.bp.ui.canvas.Ooaofgraphics;
import org.xtuml.bp.ui.canvas.Ooatype_c;
import org.xtuml.bp.ui.canvas.Shape_c;
import org.xtuml.bp.ui.canvas.util.GraphicsUtil;
import org.xtuml.bp.ui.graphics.Activator;
import org.xtuml.bp.ui.graphics.actions.CanvasCopyAction;
import org.xtuml.bp.ui.graphics.actions.CanvasCutAction;
import org.xtuml.bp.ui.graphics.actions.CanvasPasteAction;
import org.xtuml.bp.ui.graphics.actions.GraphicalActionConstants;
import org.xtuml.bp.ui.graphics.factories.ConnectorCreationFactory;
import org.xtuml.bp.ui.graphics.factories.ShapeCreationFactory;
import org.xtuml.bp.ui.graphics.figures.DecoratedPolylineConnection;
import org.xtuml.bp.ui.graphics.figures.ShapeImageFigure;
import org.xtuml.bp.ui.graphics.listeners.GraphicsEditorListener;
import org.xtuml.bp.ui.graphics.outline.GraphicalOutlinePage;
import org.xtuml.bp.ui.graphics.palette.GraphicsConnectionCreationToolEntry;
import org.xtuml.bp.ui.graphics.palette.GraphicsCreationToolEntry;
import org.xtuml.bp.ui.graphics.palette.ZoomToolEntry;
import org.xtuml.bp.ui.graphics.parts.GraphicalZoomManager;
import org.xtuml.bp.ui.graphics.parts.GraphicsEditPartFactory;
import org.xtuml.bp.ui.graphics.parts.GraphicsScalableFreeformEditPart;
import org.xtuml.bp.ui.graphics.parts.TextEditPart;
import org.xtuml.bp.ui.graphics.print.PrintDiagramOperation;
import org.xtuml.bp.ui.graphics.properties.GraphicsPropertySourceProvider;
import org.xtuml.bp.ui.graphics.providers.CanvasEditorContextMenuProvider;
import org.xtuml.bp.ui.graphics.selection.GraphicalSelectionManager;
import org.xtuml.bp.ui.graphics.tools.GraphicalPanningSelectionTool;
import org.xtuml.bp.ui.properties.BridgepointPropertySheetPage;
import org.xtuml.bp.ui.text.activity.ActivityEditorInput;
import org.xtuml.bp.ui.text.masl.MASLEditorInput;
import org.xtuml.bp.ui.text.masl.MASLPartListener;
public class GraphicalEditor extends GraphicalEditorWithFlyoutPalette implements
IPartListener, IPropertyChangeListener {
// Registry of every open graphical editor; getEditor() searches it to map
// a canvas model back to the editor displaying it.
// NOTE(review): no removal from fInstances is visible in this chunk —
// confirm the dispose path unregisters editors.
private static ArrayList<GraphicalEditor> fInstances = new ArrayList<GraphicalEditor>();
// The multi-page model editor hosting this graphical page.
private ModelEditor fParentEditor;
/**
 * Creates a graphical editor page, registering it in the static instance
 * list so it can be found via getEditor().
 *
 * @param parent the multi-page editor that hosts this page
 */
public GraphicalEditor(ModelEditor parent) {
super();
fInstances.add(this);
fParentEditor = parent;
}
/**
 * @return the multi-page editor hosting this graphical page
 */
public ModelEditor getParentEditor() {
return fParentEditor;
}
/**
 * Looks up the open editor whose canvas model is the given instance
 * (identity comparison, not equals()).
 *
 * @param model the canvas model to search for
 * @return the matching open editor, or null when none displays the model
 */
public static GraphicalEditor getEditor(Model_c model) {
    for (int i = 0; i < fInstances.size(); i++) {
        GraphicalEditor candidate = fInstances.get(i);
        if (candidate.getModel() == model) {
            return candidate;
        }
    }
    return null;
}
// The canvas model this editor renders.
Model_c fModel;
// Cached palette root; cleared by refreshPalette() to force a rebuild.
private PaletteRoot fPaletteRoot;
// Drawer receiving the default (uncategorized) creation tools.
private PaletteDrawer fToolDrawer;
// Text/id used for the context-menu Open action.
private static final String OPEN = "open";
// Actions contributed by this editor; created in createActions().
protected Action undo, redo, selectAll;
protected CanvasPasteAction paste;
protected CanvasCutAction cut;
protected CanvasCopyAction copy;
protected Action open, delete, rename;
protected Action print;
// Forwards model-change and transaction events to this editor.
private GraphicsEditorListener fEditorListener;
// Key suffixes for persisting viewport position and zoom — presumably
// combined with a per-diagram prefix (getZoomKey() etc., not in this
// chunk) into dialog-settings keys; verify against those helpers.
private String DIAGRAM_VIEWPORT_X = "__DIAGRAM_VIEWPORT_X"; //$NON-NLS-1$
private String DIAGRAM_VIEWPORT_Y = "__DIAGRAM_VIEWPORT_Y"; //$NON-NLS-1$
private String DIAGRAM_ZOOM = "__DIAGRAM_ZOOM"; //$NON-NLS-1$
// Font applied to the diagram canvas — NOTE(review): populated elsewhere
// (getFont() is not in this chunk); confirm lifecycle/disposal.
private static Font diagramFont;
// Per-figure tooltip helpers, so each figure's tooltip window can be
// hidden/reshown with editor visibility (see createGraphicalViewer()).
private HashMap<IFigure, BPToolTipHelper> tooltipMap = new HashMap<IFigure, BPToolTipHelper>();
/**
 * Returns the flyout palette preferences, forcing the palette to start in
 * the pinned-open state so tools are always visible.
 */
@Override
protected FlyoutPreferences getPalettePreferences() {
    FlyoutPreferences prefs = super.getPalettePreferences();
    prefs.setPaletteState(FlyoutPaletteComposite.STATE_PINNED_OPEN);
    return prefs;
}
/**
 * Discards the cached palette and rebuilds it from the canvas model's
 * current tool set.
 */
public void refreshPalette() {
// drop the cached root so getPaletteRoot() rebuilds from scratch
fPaletteRoot = null;
getEditDomain().setPaletteRoot(getPaletteRoot());
}
/**
 * Lazily builds the palette shown beside the diagram: a control group with
 * the panning-selection tool (also set as the default/active tool) and the
 * zoom tool, plus a "Default Toolset" drawer filled with the creation
 * tools contributed by the canvas model (see fillPalette()).
 *
 * @return the palette root, populated on first use or after
 *         refreshPalette() cleared it
 */
@Override
protected PaletteRoot getPaletteRoot() {
    if (fPaletteRoot == null || fPaletteRoot.getChildren().isEmpty()) {
        if (fPaletteRoot == null)
            fPaletteRoot = new PaletteRoot();
        PaletteGroup controlGroup = new PaletteGroup("");
        ToolEntry tool = new PanningSelectionToolEntry() {
            @Override
            public Tool createTool() {
                // Direct construction instead of the deprecated
                // Class.newInstance(); the reflective form added nothing
                // here and silently returned null on failure.
                Tool tool = new GraphicalPanningSelectionTool();
                tool.setProperties(getToolProperties());
                return tool;
            }
        };
        getEditDomain().setActiveTool(tool.createTool());
        controlGroup.add(tool);
        ToolEntry zoomToolEntry = new ZoomToolEntry(
                "Zoom Tool",
                "- Left click to zoom in. "
                        + "\n- Hold shift and left click to zoom out. "
                        + "\n- Select a group of symbols to zoom selection. "
                        + "\n- Hold ctrl and left click to zoom all. "
                        + "\n- Hold ctrl and use the mouse wheel to zoom in and out.",
                CorePlugin.getImageDescriptor("zoomAll.gif"), CorePlugin.getImageDescriptor("zoomAll.gif"), this); //$NON-NLS-1$ //$NON-NLS-2$
        controlGroup.add(zoomToolEntry);
        fPaletteRoot.add(controlGroup);
        fPaletteRoot.setDefaultEntry(tool);
        fToolDrawer = new PaletteDrawer("Default Toolset");
        fPaletteRoot.add(fToolDrawer);
        fillPalette(fToolDrawer);
    }
    return fPaletteRoot;
}
/**
 * Populates the palette with one creation-tool entry per tool object
 * contributed by the canvas model: shape tools get a
 * GraphicsCreationToolEntry, connector tools a
 * GraphicsConnectionCreationToolEntry.  For package/component diagrams the
 * tools are additionally grouped into per-category drawers (created on
 * demand from the comma-separated tool category string), and tools that do
 * not apply to the current component nesting are skipped.
 *
 * @param drawer the default drawer receiving uncategorized tools
 */
private void fillPalette(PaletteDrawer drawer) {
// have the editor's canvas create the tool objects that perform the
// tools' functionality
Model_c canvas = fModel;
if (canvas == null)
return;
canvas.Initializetools();
// for each tool employed by the canvas
ModelTool_c[] tools = ModelTool_c.getManyCT_MTLsOnR100(canvas);
ModelTool_c tool = null;
ArrayList<PaletteDrawer> createdDrawers = new ArrayList<PaletteDrawer>();
for (int i1 = 0; i1 < tools.length; i1++) {
tool = tools[i1];
// do not allow tool button if the element specification
// indicates that this element is created along with
// the canvas
final ElementSpecification_c elem = ElementSpecification_c
.getOneGD_ESOnR103(tool);
if (elem != null && !elem.getCreationrule().equals("manual")) // $NON-NLS-1$
continue;
// if there is a element-specification associated with this tool's
// function
// (because it is a shape-tool)
if (elem != null) {
boolean isPackage = false;
if (getModel().getRepresents() instanceof Package_c
|| getModel().getRepresents() instanceof Component_c) {
isPackage = true;
if (getModel().getRepresents() instanceof Component_c) {
Component_c self = (Component_c) getModel()
.getRepresents();
// If this component is under a EP_PKG then we do NOT
// want to allow specialized packages to be created in
// it.
//
//
// If the component is NOT under a EP_PKG then we do
// allow
// specialized packages, but we do NOT allow an eP_PKG
// to be
// created in the component.
Package_c pkg = Package_c
.getOneEP_PKGOnR8000(PackageableElement_c
.getOnePE_PEOnR8001(self));
Component_c comp = Component_c
.getOneC_COnR8003(PackageableElement_c
.getOnePE_PEOnR8001(self));
boolean isInGenericPackage = ((pkg != null) || (comp != null));
int toolType = tool.getOoa_type();
if (isInGenericPackage) {
// inside a generic package: suppress the specialized
// diagram tools
switch (toolType) {
case Ooatype_c.Sequence:
case Ooatype_c.Communication:
case Ooatype_c.UseCaseDiagram:
case Ooatype_c.Activity:
continue;
default:
break;
}
} else {
// not under a generic package: suppress generic
// package/interface/user-data-type tools instead
switch (toolType) {
case Ooatype_c.Package:
continue;
case Ooatype_c.Interface:
continue;
case Ooatype_c.UserDataType:
continue;
default:
break;
}
}
}
}
String category = elem.getToolcategory();
ArrayList<PaletteDrawer> drawers = new ArrayList<PaletteDrawer>();
if (category.equals("") || !isPackage) {
drawers.add(drawer);
}
if (isPackage) {
// one drawer per named category; drawers are shared across tools
// and created closed on first use
String[] categories = category.split(","); // $NON-NLS-1$
for (int i = 0; i < categories.length; i++) {
String label = categories[i].trim();
if (label.equals(""))
continue;
boolean found = false;
for (PaletteDrawer created : createdDrawers) {
if (created.getLabel().equals(label)) {
found = true;
drawers.add(created);
break;
}
}
if (!found) {
PaletteDrawer newDrawer = new PaletteDrawer(label);
newDrawer
.setInitialState(PaletteDrawer.INITIAL_STATE_CLOSED);
fPaletteRoot.add(newDrawer);
createdDrawers.add(newDrawer);
drawers.add(newDrawer);
}
}
}
if (elem.getSymboltype().equals("shape")) {
for (PaletteDrawer group : drawers) {
GraphicsCreationToolEntry entry = new GraphicsCreationToolEntry(
elem.getName(), "New " + elem.getName(),
new ShapeCreationFactory(),
CorePlugin.getImageDescriptor(elem
.getIconname()),
CorePlugin.getImageDescriptor(elem
.getIconname()), tool.getOoa_type());
entry.setSmallIcon(CorePlugin.getImageDescriptor(elem
.getIconname()));
entry.setLargeIcon(CorePlugin.getImageDescriptor(elem
.getIconname()));
group.add(entry);
}
} else if (elem.getSymboltype().equals("connector")) {
for (PaletteDrawer group : drawers) {
ConnectionCreationToolEntry entry = new GraphicsConnectionCreationToolEntry(
elem.getName(), "New " + elem.getName(),
new ConnectorCreationFactory(),
CorePlugin.getImageDescriptor(elem
.getIconname()),
CorePlugin.getImageDescriptor(elem
.getIconname()), tool.getOoa_type());
entry.setSmallIcon(CorePlugin.getImageDescriptor(elem
.getIconname()));
entry.setLargeIcon(CorePlugin.getImageDescriptor(elem
.getIconname()));
group.add(entry);
}
}
}
}
}
/**
 * Adapter lookup: supplies the graphical outline page, the BridgePoint
 * property sheet, and the viewer's zoom manager; every other request is
 * delegated to the superclass.
 *
 * @param adapter the requested adapter type (raw type is the inherited
 *        workbench signature)
 * @return the adapter instance, or whatever the superclass provides
 */
@SuppressWarnings("unchecked")
public Object getAdapter(Class adapter) {
if (adapter.equals(IContentOutlinePage.class)) {
return getContentOutline();
}
if (adapter.equals(IPropertySheetPage.class)) {
return getPropertySheet();
}
if (adapter.equals(ZoomManager.class)) {
// the zoom manager is stored as a viewer property keyed by the
// ZoomManager class name (see configureGraphicalViewer)
return (ZoomManager) getGraphicalViewer().getProperty(
ZoomManager.class.toString());
}
return super.getAdapter(adapter);
}
// Creates the BridgePoint property sheet page, backed by the graphics
// property-source provider.
private Object getPropertySheet() {
    PropertySheetPage page = new BridgepointPropertySheetPage();
    GraphicsPropertySourceProvider provider = new GraphicsPropertySourceProvider();
    page.setPropertySourceProvider(provider);
    return page;
}
// Builds the outline page that mirrors this editor's viewer content.
private Object getContentOutline() {
    GraphicalOutlinePage outline = new GraphicalOutlinePage(getGraphicalViewer(), this);
    return outline;
}
/**
 * No-op: BridgePoint diagrams are persisted automatically, so an explicit
 * save has nothing to write (see isDirty()).
 *
 * @param monitor unused
 */
@Override
public void doSave(IProgressMonitor monitor) {
// autosaved, nothing to do here
}
/**
 * Initializes this editor from one of three supported input kinds:
 * GraphicalEditorInput (the normal case — wires model-change and
 * transaction listeners and creates the edit domain), FileEditorInput
 * (resolves and, if necessary, loads the persistable component for the
 * file, then re-enters init with a GraphicalEditorInput), or
 * SimpleGraphicalEditorInput (a reduced setup listening only to the
 * graphics model).
 *
 * @param site the editor site
 * @param input one of the three input kinds above
 * @throws PartInitException if a file-based input's model element cannot
 *         be loaded
 */
@Override
public void init(IEditorSite site, IEditorInput input)
throws PartInitException {
if (input instanceof GraphicalEditorInput) {
GraphicalEditorInput canvasInput = (GraphicalEditorInput) input;
fModel = canvasInput.getInput();
fEditorListener = new GraphicsEditorListener(this);
CanvasPlugin.setGraphicalRepresents(fModel);
// listen to both the semantic (ooaofooa) and graphical models
Ooaofooa.getDefaultInstance().addModelChangeListener(
fEditorListener);
Ooaofgraphics.getDefaultInstance().addModelChangeListener(
fEditorListener);
site.getPage().addPartListener(this);
getTransactionManager().addTransactionListener(fEditorListener);
setName(canvasInput.getName());
setEditDomain(new GraphicalEditDomain(this, fModel.getRepresents()));
// NOTE(review): Gr_c.cur_model is global, last-initialized-wins
// state shared by the graphics layer — confirm intended
Gr_c.cur_model = fModel;
super.init(site, input);
} else if (input instanceof FileEditorInput) {
PersistableModelComponent pmc = PersistenceManager
.findOrCreateComponent(((FileEditorInput) input).getFile()
.getFullPath());
if (pmc != null) {
if (!pmc.isLoaded()) {
try {
pmc.load(new NullProgressMonitor());
} catch (CoreException e) {
PartInitException pie = new PartInitException(
CorePlugin.createImportErrorStatus(true,
"Error loading model element"));
pie.fillInStackTrace();
throw pie;
}
}
// delegate to the GraphicalEditorInput branch above
GraphicalEditorInput cei = GraphicalEditorInput
.createInstance(pmc.getRootModelElement());
init(site, cei);
}
} else if(input instanceof SimpleGraphicalEditorInput) {
SimpleGraphicalEditorInput sgei = (SimpleGraphicalEditorInput) input;
fModel = sgei.getModel();
fEditorListener = new GraphicsEditorListener(this);
Ooaofgraphics.getDefaultInstance().addModelChangeListener(fEditorListener);
site.getPage().addPartListener(this);
setName(sgei.getName());
setEditDomain(new GraphicalEditDomain(this, fModel.getRepresents()));
Gr_c.cur_model = fModel;
super.init(site, input);
}
}
/**
 * @return the SWT control hosting the diagram, or null before the
 *         graphical viewer has been created
 */
public Control getCanvas() {
    return getGraphicalViewer() == null ? null : getGraphicalControl();
}
/**
 * Sets the editor tab label.
 *
 * @param name the new part name
 */
public void setName(String name) {
setPartName(name);
}
/**
 * Tooltip for the editor tab: the fully qualified model path when the
 * diagram represents a model element, the part name for any other
 * represented object, or an empty string when nothing is represented.
 */
@Override
public String getTitleToolTip() {
    Object represents = getModel().getRepresents();
    if (represents instanceof NonRootModelElement) {
        return HierarchyUtil.Getpath(represents);
    }
    return represents == null ? "" : getPartName();
}
/**
 * Replaces this editor's input without re-running initialization.
 *
 * @param input the new editor input
 */
public void setEditorInput(IEditorInput input) {
setInput(input);
}
/**
 * Always false: BridgePoint diagrams are auto-persisted, so this editor
 * never carries unsaved changes.
 */
@Override
public boolean isDirty() {
// for now all BP diagrams are auto-persisted
return false;
}
/**
 * Configures the viewer: installs the scalable root edit part, the zoom
 * manager (zoom levels, fit contributions, persisted startup zoom, and a
 * listener that persists zoom changes), zoom in/out actions, ctrl+wheel
 * zooming, the edit-part factory, key handler, context menu and the
 * graphical selection manager.
 */
@Override
protected void configureGraphicalViewer() {
super.configureGraphicalViewer();
GraphicalViewer viewer = getGraphicalViewer();
// NOTE(review): Gr_c.cur_canvas is global state shared by the graphics
// layer; the last configured editor wins — confirm intended
Gr_c.cur_canvas = (Canvas) getCanvas();
viewer.setRootEditPart(new GraphicsScalableFreeformEditPart());
// Zoom
GraphicalZoomManager manager = (GraphicalZoomManager) getGraphicalViewer()
.getProperty(ZoomManager.class.toString());
if (manager != null) {
manager.setModel(getModel());
// persist the zoom level whenever the user changes it
manager.addZoomListener(new ZoomListener() {
@Override
public void zoomChanged(double zoom) {
storeZoomValue(zoom);
}
});
List<String> zoomLevels = new ArrayList<String>(3);
zoomLevels.add(ZoomManager.FIT_ALL);
zoomLevels.add(ZoomManager.FIT_WIDTH);
zoomLevels.add(ZoomManager.FIT_HEIGHT);
zoomLevels.add(GraphicalZoomManager.FIT_SELECTION);
manager.setZoomLevelContributions(zoomLevels);
manager.setZoomLevels(new double[] { .10, .20, .25, .50, .75, 1.00,
1.25, 1.50, 1.75, 2.00, 2.25, 2.50, 3.00, 3.50, 4.00 });
// restore the zoom persisted for this diagram, if any (-1 = none)
double zoom = getZoom();
if (zoom != -1) {
manager.configureZoomAtStartup(zoom);
}
}
// Actions
IAction zoomIn = new ZoomInAction(manager);
IAction zoomOut = new ZoomOutAction(manager);
getActionRegistry().registerAction(zoomIn);
getActionRegistry().registerAction(zoomOut);
// Scroll-wheel Zoom
getGraphicalViewer().setProperty(
MouseWheelHandler.KeyGenerator.getKey(SWT.MOD1),
MouseWheelZoomHandler.SINGLETON);
viewer.setEditPartFactory(new GraphicsEditPartFactory());
viewer.setKeyHandler(new GraphicalViewerKeyHandler(viewer)
.setParent(getCommonKeyHandler()));
ContextMenuProvider cmProvider = new CanvasEditorContextMenuProvider(
viewer, this);
viewer.setContextMenu(cmProvider);
getSite().registerContextMenu(cmProvider, viewer);
viewer.setSelectionManager(new GraphicalSelectionManager());
}
/**
 * Creates the scrolling graphical viewer.  Resets scroll/zoom defaults for
 * (near-)empty diagrams, installs a viewer subclass whose event dispatcher
 * supplies BridgePoint tooltip helpers, and hooks scrollbar/resize/focus
 * listeners that persist the viewport position and zoom-to-fit after the
 * first real layout.
 *
 * @param parent the SWT composite the viewer control is created under
 */
@Override
protected void createGraphicalViewer(Composite parent) {
// if the diagram to be opened has no elements, or
// in the case of a component diagram only one then
// we initialize the scroll and zoom values to defaults
GraphicalElement_c[] children = GraphicalElement_c
.getManyGD_GEsOnR1(fModel);
if (children.length == 0
|| (children.length == 1 && fModel.Hascontainersymbol())) {
storeZoomValue(1.0);
setViewportLocationInStorage(4000, 3000);
}
GraphicalViewer viewer = new ScrollingGraphicalViewer() {
@Override
public void reveal(EditPart part) {
// do not support reveal at this time
// as the only time it is used is during
// selection of a graphical element when
// it is not completely visible
// We at this time do not believe that this
// is a "good feature"
}
/*
* override setEditDomain where the event dispatcher object is
* created in order to use BridgePoint custom tooltip helper
*/
@Override
public void setEditDomain(EditDomain domain){
super.setEditDomain(domain);
getLightweightSystem().setEventDispatcher(new DomainEventDispatcher(domain, this){
// Override the creation of ToolTip helper object
BPToolTipHelper defaultHelper;
@Override
protected ToolTipHelper getToolTipHelper() {
/*
* Create new helper each time to support multi tool tip
* window. In order to associate their tooltip helper
* with their editor to hide when the editor is not
* visible, reshow when it is visible, a hash map
* shall be created to store created helper, to be
* notified by editor visibility change
*/
IFigure hoverSource = this.getCursorTarget();
// text figures delegate tooltips to their owning figure
if(hoverSource instanceof TextFlow) {
hoverSource = hoverSource.getParent();
}
if(hoverSource instanceof FlowPage) {
hoverSource = hoverSource.getParent();
}
if (hoverSource instanceof ShapeImageFigure || hoverSource instanceof DecoratedPolylineConnection){
BPToolTipHelper existedHelper = tooltipMap.get(hoverSource);
if ( existedHelper != null)
return existedHelper;
BPToolTipHelper newHelper = new BPToolTipHelper(control);
tooltipMap.put(hoverSource,newHelper);
return newHelper;
}
if (defaultHelper == null)
defaultHelper = new BPToolTipHelper(control);
// Notify all editor helpers to close their simple tooltip if up
Collection<BPToolTipHelper> helpers = tooltipMap.values();
for (BPToolTipHelper helper : helpers) {
helper.hideSimpleToolTip();
}
return defaultHelper;
}
});
}
};
viewer.createControl(parent);
setGraphicalViewer(viewer);
configureGraphicalViewer();
hookGraphicalViewer();
initializeGraphicalViewer();
// persist the viewport position whenever either scrollbar moves
((FigureCanvas) getCanvas()).getVerticalBar().addSelectionListener(
new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
storeViewportLocation();
}
});
((FigureCanvas) getCanvas()).getHorizontalBar().addSelectionListener(
new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
storeViewportLocation();
}
});
// add a control listener to zoom fit once resized
((FigureCanvas) getCanvas()).addControlListener(new ControlListener() {
@Override
public void controlResized(ControlEvent e) {
// extent > 100 filters out the tiny initial layout passes
if (shouldZoomFit()
&& ((FigureCanvas) getCanvas()).getViewport()
.getHorizontalRangeModel().getExtent() > 100) {
((FigureCanvas) getCanvas()).getViewport().revalidate();
((FigureCanvas) getCanvas()).getViewport()
.getUpdateManager().performUpdate();
zoomAll();
}
}
@Override
public void controlMoved(ControlEvent e) {
// do nothing
}
});
((FigureCanvas) getCanvas()).setFont(getFont());
getCanvas().addFocusListener(new FocusListener() {
public void focusGained(FocusEvent ev) {
EditorUtil.refreshEditorTab();
}
public void focusLost(FocusEvent ev) { /* do nothing */ }
});
}
/**
 * A diagram should be zoomed to fit on open only when no zoom value has
 * ever been persisted for it (getZoom() reports -1 in that case).
 *
 * @return true when no stored zoom setting exists for this diagram
 */
protected boolean shouldZoomFit() {
    return getZoom() == -1;
}
/**
 * @return the diagram instance related to this editor's canvas model
 *         across association R18
 */
protected Diagram_c getDiagram() {
    return Diagram_c.getOneDIM_DIAOnR18(getModel());
}
/**
 * Reads the zoom level persisted for this diagram from the plug-in's
 * dialog settings.
 *
 * @return the stored zoom factor, or -1 when the value was never stored
 *         (or is unparseable)
 */
public double getZoom() {
    try {
        return Activator.getDefault().getDialogSettings().getDouble(getZoomKey());
    } catch (NumberFormatException e) {
        // value was never set or was set incorrectly; report "not set"
        // rather than logging an error
        return -1;
    }
}
/**
 * Sets the canvas model as the viewer contents, applies the grid
 * preferences, and subscribes this editor to JFace font-registry changes
 * (handled in propertyChange(), which is not in this chunk).
 */
@Override
protected void initializeGraphicalViewer() {
super.initializeGraphicalViewer();
if (fModel != null) {
getGraphicalViewer().setContents(fModel);
}
configureGridOptions();
// add self to font property change listener list
JFaceResources.getFontRegistry().addListener(this);
}
/**
 * Applies the BridgePoint grid preferences — visibility, snap-to-grid and
 * spacing — to the graphical viewer's SnapToGrid properties.
 */
public void configureGridOptions() {
    // hoisted: the same preference store is consulted for all three settings
    IPreferenceStore store = CorePlugin.getDefault().getPreferenceStore();
    boolean showGrid = store.getBoolean(BridgePointPreferencesStore.SHOW_GRID);
    // Boolean.valueOf avoids the deprecated allocating Boolean(boolean) ctor
    getGraphicalViewer().setProperty(SnapToGrid.PROPERTY_GRID_VISIBLE,
            Boolean.valueOf(showGrid));
    boolean snapGrid = store.getBoolean(BridgePointPreferencesStore.SNAP_TO_GRID);
    getGraphicalViewer().setProperty(SnapToGrid.PROPERTY_GRID_ENABLED,
            Boolean.valueOf(snapGrid));
    int gridSpacing = store.getInt(BridgePointPreferencesStore.GRID_SPACING);
    getGraphicalViewer().setProperty(SnapToGrid.PROPERTY_GRID_SPACING,
            new Dimension(gridSpacing, gridSpacing));
}
/**
 * Shared key handler chained behind the viewer's own key handler (see
 * configureGraphicalViewer()).
 *
 * @return null — no common key bindings are currently contributed
 */
private KeyHandler getCommonKeyHandler() {
return null;
}
/** @return the context-menu Open action */
public IAction getOpenAction() {
return open;
}
/** @return the undo action supplied by the transaction manager */
public IAction getUndoAction() {
return undo;
}
/** @return the redo action supplied by the transaction manager */
public IAction getRedoAction() {
return redo;
}
/** @return the canvas cut action */
public CanvasCutAction getCutAction() {
return cut;
}
/** @return the canvas copy action */
public CanvasCopyAction getCopyAction() {
return copy;
}
/** @return the canvas paste action */
public CanvasPasteAction getPasteAction() {
return paste;
}
/** @return the select-all action */
public IAction getSelectAllAction() {
return selectAll;
}
/** @return the delete action */
public IAction getDeleteAction() {
return delete;
}
/** @return the rename action */
public IAction getRenameAction() {
return rename;
}
/**
 * Creates and registers every action this editor contributes: open,
 * cut/copy/paste, undo/redo (obtained from the model's transaction
 * manager rather than GEF's command stack), select-all, delete (runs in a
 * DELETE_TRANS transaction over both model roots), rename, print,
 * zoom-page/zoom-selection, and the GEF match-size, direct-edit and
 * alignment selection actions.
 */
@SuppressWarnings("unchecked")
@Override
protected void createActions() {
super.createActions();
// 'New' is provided as a sub-menu only. See 'createMenus'
open = new Action(OPEN) {
public void run() {
handleOpen(null, fModel,
(IStructuredSelection) getGraphicalViewer()
.getSelectionManager().getSelection());
}
};
open.setText("Open");
open.setToolTipText("Open this model Element");
// 'Open With' is provided as a sub-menu only. See 'createMenus'
cut = new CanvasCutAction(this);
cut.setId(ActionFactory.CUT.getId());
copy = new CanvasCopyAction(this);
copy.setId(ActionFactory.COPY.getId());
getActionRegistry().registerAction(copy);
getActionRegistry().registerAction(cut);
// clipboard paste
paste = new CanvasPasteAction(this);
paste.setId(ActionFactory.PASTE.getId());
getActionRegistry().registerAction(paste);
// undo/redo are provided by the model's transaction manager
TransactionManager manager = getTransactionManager();
undo = manager.getUndoAction();
undo.setId(ActionFactory.UNDO.getId());
redo = manager.getRedoAction();
redo.setId(ActionFactory.REDO.getId());
getActionRegistry().registerAction(undo);
getActionRegistry().registerAction(redo);
selectAll = new SelectAllAction(this) {
@Override
public void run() {
GraphicalViewer viewer = (GraphicalViewer) getAdapter(GraphicalViewer.class);
if (viewer != null) {
viewer
.setSelection(new StructuredSelection(filterOutTextEditParts(
getAllSymbols(getGraphicalViewer(), fModel
.Hascontainersymbol()))));
}
}
private List<GraphicalEditPart> filterOutTextEditParts(
List<GraphicalEditPart> allSymbols) {
List<GraphicalEditPart> filtered = new ArrayList<GraphicalEditPart>();
for(GraphicalEditPart part : allSymbols) {
// we filter text edit parts as they are not really selectable,
// which in-turn causes duplicates in the selection list as they
// are migrated to their owning part
if(!(part instanceof TextEditPart)) {
filtered.add(part);
}
}
return filtered;
}
};
selectAll.setId(ActionFactory.SELECT_ALL.getId());
getActionRegistry().registerAction(selectAll);
//
// Delete and Rename are retargetable actions defined by core.
//
delete = new Action() {
public void run() {
Transaction transaction = null;
TransactionManager manager = getTransactionManager();
try {
// graphics-only elements are disposed here; anything left in
// the selection afterwards is delegated to core's delete action
transaction = manager.startTransaction(
Transaction.DELETE_TRANS, new ModelRoot[] {
Ooaofooa.getDefaultInstance(),
Ooaofgraphics.getDefaultInstance() });
IStructuredSelection structuredSelection = Selection
.getInstance().getStructuredSelection();
Iterator<?> iterator = structuredSelection.iterator();
while (iterator.hasNext()) {
NonRootModelElement element = (NonRootModelElement) iterator
.next();
if (element instanceof GraphicalElement_c) {
((GraphicalElement_c) element).Dispose();
Selection.getInstance()
.removeFromSelection(element);
}
}
if (!Selection.getInstance().getStructuredSelection()
.isEmpty()) {
CorePlugin.getDeleteAction().run();
}
manager.endTransaction(transaction);
} catch (TransactionException e) {
if (transaction != null && manager != null
&& manager.getActiveTransaction() == transaction)
manager.cancelTransaction(transaction);
CorePlugin.logError("Unable to start transaction", e);
}
// refresh the parents of whatever was selected so disposed
// children disappear from the canvas
for (Object part : getGraphicalViewer().getSelectedEditParts()) {
if (part instanceof EditPart) {
if(((EditPart) part).getParent() != null) {
((EditPart) part).getParent().refresh();
}
}
}
}
@Override
public boolean isEnabled() {
return CanvasEditorContextMenuProvider
.enableDelete((IStructuredSelection) getSite()
.getSelectionProvider().getSelection());
}
};
delete.setText("Delete");
delete.setToolTipText("Delete the Model Element");
delete.setId(ActionFactory.DELETE.getId());
getActionRegistry().registerAction(delete);
// rename = CorePlugin.getRenameAction(treeViewer); <- need to
// generalize renameAction first
rename = new Action() {
public void run() {
Object selection = Selection.getInstance().getStructuredSelection()
.getFirstElement();
if (selection != null) {
String oldName = Cl_c.Getname(selection);
Shell sh = getSite().getShell();
RenameAction.handleRename(selection, oldName, sh);
}
}
@Override
public boolean isEnabled() {
if(CanvasCutAction.selectionContainsOnlyCoreElements()) {
return RenameAction.canRenameAction();
}
return false;
}
};
rename.setText("Rename");
rename.setToolTipText("Rename the Model Element");
rename.setEnabled(true); // Retargetable Actions work removes this line
rename.setId(ActionFactory.RENAME.getId());
getActionRegistry().registerAction(rename);
print = new Action() {
public void run() {
handlePrint();
}
};
print.setText("Print");
print.setToolTipText("Print the Diagram");
print.setEnabled(true);
print.setId(ActionFactory.PRINT.getId());
getActionRegistry().registerAction(print);
ActionRegistry registry = getActionRegistry();
IAction action;
action = new Action() {
@Override
public void run() {
zoomAll();
}
};
action.setText("Zoom Page");
action.setImageDescriptor(CorePlugin.getImageDescriptor("zoomAll.gif")); // $NON-NLS-1$
action.setId(GraphicalActionConstants.ZOOM_PAGE);
action.setToolTipText("Click to zoom the entire contents");
registry.registerAction(action);
action = new Action() {
@Override
public void run() {
zoomSelected();
}
};
action.setText("Zoom Selection");
action.setImageDescriptor(CorePlugin.getImageDescriptor("zoomSel.gif")); // $NON-NLS-1$
action.setId(GraphicalActionConstants.ZOOM_SEL);
action.setToolTipText("Click to zoom the current selection");
registry.registerAction(action);
// GEF selection-dependent actions: their ids are added to the
// selection-action list so enablement refreshes on selection change
action = new MatchWidthAction(this);
registry.registerAction(action);
getSelectionActions().add(action.getId());
action = new MatchHeightAction(this);
registry.registerAction(action);
getSelectionActions().add(action.getId());
action = new DirectEditAction((IWorkbenchPart) this);
registry.registerAction(action);
getSelectionActions().add(action.getId());
action = new AlignmentAction((IWorkbenchPart) this,
PositionConstants.LEFT);
registry.registerAction(action);
getSelectionActions().add(action.getId());
action = new AlignmentAction((IWorkbenchPart) this,
PositionConstants.RIGHT);
registry.registerAction(action);
getSelectionActions().add(action.getId());
action = new AlignmentAction((IWorkbenchPart) this,
PositionConstants.TOP);
registry.registerAction(action);
getSelectionActions().add(action.getId());
action = new AlignmentAction((IWorkbenchPart) this,
PositionConstants.BOTTOM);
registry.registerAction(action);
getSelectionActions().add(action.getId());
action = new AlignmentAction((IWorkbenchPart) this,
PositionConstants.CENTER);
registry.registerAction(action);
getSelectionActions().add(action.getId());
action = new AlignmentAction((IWorkbenchPart) this,
PositionConstants.MIDDLE);
registry.registerAction(action);
getSelectionActions().add(action.getId());
}
/**
 * @return the transaction manager owned by this editor's diagram model
 */
public TransactionManager getTransactionManager() {
    return fModel.getTransactionManager();
}
/**
 * Opens the platform print dialog and, if the user confirms, prints the
 * diagram scaled to fit a single page.
 */
protected void handlePrint() {
    // Mirror the orientation of the active workbench shell so the print
    // dialog matches the application's text direction.
    int style = PlatformUI.getWorkbench().getActiveWorkbenchWindow()
            .getShell().getStyle();
    Shell shell = new Shell((style & SWT.MIRRORED) != 0 ? SWT.RIGHT_TO_LEFT
            : SWT.NONE);
    try {
        PrintDialog dialog = new PrintDialog(shell, SWT.NULL);
        PrinterData data = dialog.open();
        // data is null when the user cancels the dialog
        if (data != null) {
            PrintGraphicalViewerOperation operation = new PrintDiagramOperation(
                    new Printer(data), getGraphicalViewer(), this);
            operation.setPrintMode(PrintGraphicalViewerOperation.FIT_PAGE);
            operation.run("Print canvas.");
        }
    } finally {
        // The temporary shell was previously leaked; dispose it so the
        // underlying SWT resource is reclaimed.
        shell.dispose();
    }
}
/**
 * Fire up an editor for the selected element.
 *
 * Resolves the selection down to a core model element (unwrapping edit
 * parts, floating labels, shapes and connectors), then searches the
 * {@code org.xtuml.bp.core.editors} extension point for the editor
 * registered as the default for that element's class, builds its input
 * reflectively and opens it.
 *
 * @param location  the double-click location, or null; used to resolve an
 *                  icon link inside a model-class shape
 * @param model     the diagram model the selection belongs to
 * @param selection the current structured selection
 */
public static void handleOpen(Point location, Model_c model,
        IStructuredSelection selection) {
    if (!selection.isEmpty()) {
        Object current = selection.iterator().next();
        if (current instanceof EditPart) {
            current = ((EditPart) current).getModel();
            if (current instanceof FloatingText_c) {
                // a floating label opens whatever it decorates
                FloatingText_c text = (FloatingText_c) current;
                Connector_c connector = Connector_c.getOneGD_CONOnR8(text);
                if (connector != null)
                    current = connector;
                Shape_c shape = Shape_c.getOneGD_SHPOnR27(text);
                if (shape != null)
                    current = shape;
            }
            // unwrap graphics to the represented core element
            if (current instanceof Model_c) {
                current = ((Model_c) current).getRepresents();
            } else if (current instanceof Shape_c) {
                GraphicalElement_c elem = GraphicalElement_c
                        .getOneGD_GEOnR2((Shape_c) current);
                current = elem.getRepresents();
            } else if (current instanceof Connector_c) {
                GraphicalElement_c elem = GraphicalElement_c
                        .getOneGD_GEOnR2((Connector_c) current);
                current = elem.getRepresents();
            }
        }
        // if a mouse event was given, and the selected element is a
        // model-class
        if (location != null && current instanceof ModelClass_c) {
            // find the graphical element that represents the selected
            // model-class
            final Object finalCurrent = current;
            GraphicalElement_c element = GraphicalElement_c
                    .GraphicalElementInstance(model.getModelRoot(),
                            new ClassQueryInterface_c() {
                                public boolean evaluate(Object candidate) {
                                    return ((GraphicalElement_c) candidate)
                                            .getRepresents() == finalCurrent;
                                }
                            });
        // ask the shape associated with the above graphical-element what
        // the mouse-event represents a double-click on, since the shape
        // may be displaying an icon which is a link to a different model
        // element
            Shape_c shape = Shape_c.getOneGD_SHPOnR2(element);
            Graphelement_c graphElement = Graphelement_c
                    .getOneDIM_GEOnR23(element);
            current = shape.Getrepresents(
                    (int) (location.x - graphElement.getPositionx()),
                    (int) (location.y - graphElement.getPositiony()));
        }
        // see if the current element should open
        // something other than itself
        current = EditorUtil.getElementToEdit(current);
        String name = current.getClass().getName();
        // Get the registry and all plug-ins that have extended the
        // editors extension point
        IExtensionRegistry reg = Platform.getExtensionRegistry();
        IExtensionPoint extPt = reg
                .getExtensionPoint("org.xtuml.bp.core.editors"); //$NON-NLS-1$
        IExtension[] exts = extPt.getExtensions();
        // Repeat for each extension until we find a default editor
        for (int i = 0; i < exts.length; i++) {
            IConfigurationElement[] elems = exts[i].getConfigurationElements();
            for (int j = 0; j < elems.length; j++) {
                // Find the editor elements
                if (elems[j].getName().equals("editor")) { //$NON-NLS-1$
                    IConfigurationElement[] edElems = elems[j].getChildren();
                    for (int k = 0; k < edElems.length; k++) {
                        // Is this editor the default for the current model
                        // element ?
                        if (edElems[k].getName().equals("defaultFor") && //$NON-NLS-1$
                                edElems[k].getAttribute("class").equals(name)) {
                            try {
                                // Get the class supplied for the input;
                                // always use the contributing bundle, other
                                // graphical plug-ins will provide their own
                                // open method
                                Bundle bundle = Platform.getBundle(elems[j]
                                        .getContributor().getName());
                                Class<?> inputClass = bundle.loadClass(
                                        elems[j].getAttribute("input")); //$NON-NLS-1$
                                String editorId = elems[j].getAttribute("class"); //$NON-NLS-1$
                                String dialect = "";
                                if (editorId.equals(ActivityEditorInput.EDITOR_ID)) {
                                    // see if the current element should open
                                    // something other than itself
                                    Object dialectObj = current;
                                    if (dialectObj instanceof StateMachineState_c) {
                                        StateMachineState_c state = (StateMachineState_c) dialectObj;
                                        Action_c action = Action_c.getOneSM_ACTOnR514(ActionHome_c
                                                .getOneSM_AHOnR513((MooreActionHome_c.getOneSM_MOAHOnR511(state))));
                                        if (action != null) {
                                            dialectObj = action;
                                        }
                                    }
                                    // Get the value of the dialect attribute
                                    Method getDialectMethod = dialectObj.getClass().getMethod("getDialect"); //$NON-NLS-1$
                                    dialect = (String) getDialectMethod.invoke(dialectObj, new Object[]{});
                                    // If the "dialect" attribute is neither "oal" nor "masl",
                                    // check the default language preference. Set "dialect" to
                                    // be the preference value and open the proper editor.
                                    if (dialect.isEmpty()) {
                                        IPreferenceStore store = CorePlugin.getDefault().getPreferenceStore();
                                        String option = store
                                                .getString(BridgePointPreferencesStore.DEFAULT_ACTION_LANGUAGE_DIALECT);
                                        Class[] type = new Class[1];
                                        type[0] = String.class;
                                        Method setDialectMethod = dialectObj.getClass().getMethod("setDialect", type); //$NON-NLS-1$
                                        Object[] args = new Object[1];
                                        if (option.equals("MASL")) { //$NON-NLS-1$
                                            dialect = "masl"; //$NON-NLS-1$
                                            args[0] = dialect;
                                        } else {
                                            dialect = "oal"; //$NON-NLS-1$
                                            args[0] = dialect;
                                        }
                                        setDialectMethod.invoke(dialectObj, args);
                                    }
                                }
                                // check to see if we need to open the MASL editor
                                if (MASLEditorInput.isSupported(current) && dialect.equals("masl")) { //$NON-NLS-1$
                                    inputClass = bundle.loadClass("org.xtuml.bp.ui.text.masl.MASLEditorInput"); //$NON-NLS-1$
                                    try {
                                        editorId = (String) inputClass.getField("EDITOR_ID").get(null); //$NON-NLS-1$
                                    } catch (NoSuchFieldException e) {
                                        // was System.out.println(e): route through the
                                        // plug-in log like every other failure here
                                        CanvasPlugin.logError(
                                                "MASLEditorInput does not declare EDITOR_ID", e); //$NON-NLS-1$
                                    }
                                }
                                Class<?>[] type = new Class[1];
                                type[0] = Object.class;
                                // Dynamically get the method createInstance,
                                // the supplied class must implement this
                                Method createInstance = inputClass
                                        .getMethod("createInstance", type); //$NON-NLS-1$
                                Object[] args = new Object[1];
                                args[0] = current;
                                // Invoke the method. The method is static;
                                // no instance is needed, so first argument
                                // is null
                                IEditorInput input = (IEditorInput) createInstance
                                        .invoke(null, args);
                                // pass the input to the Eclipse editor,
                                // along with the class name supplied by
                                // the extending plugin.
                                if (input != null) {
                                    IWorkbenchPage page = (IWorkbenchPage) PlatformUI.getWorkbench()
                                            .getActiveWorkbenchWindow().getActivePage();
                                    // FIX: compare String content, not identity;
                                    // editorId may be an equal but distinct instance
                                    if (MASLEditorInput.EDITOR_ID.equals(editorId)) {
                                        page.addPartListener((IPartListener2) new MASLPartListener());
                                    }
                                    page.openEditor(input, editorId);
                                }
                                return;
                            } catch (ClassNotFoundException e) {
                                CanvasPlugin.logError(
                                        "Input Class not found", e); //$NON-NLS-1$
                            } catch (NoSuchMethodException e) {
                                CanvasPlugin.logError(
                                        "Class does not implement static method createInstance", e); //$NON-NLS-1$
                            } catch (InvocationTargetException e) {
                                CanvasPlugin.logError(
                                        "Exception occured on invocation of static method createInstance of the Target", e.getTargetException()); //$NON-NLS-1$
                            } catch (IllegalAccessException e) {
                                CanvasPlugin.logError(
                                        "Target does not support static method createInstance", e); //$NON-NLS-1$
                            } catch (PartInitException e) {
                                CanvasPlugin.logError(
                                        "Could not activate Editor", e); //$NON-NLS-1$
                            }
                        }
                    }
                }
            }
        }
    }
}
/**
 * @return the diagram model currently backing this editor
 */
public Model_c getModel() {
    return fModel;
}
/**
 * Refreshes the diagram contents and its direct children, then updates
 * the part name. Grandchildren are not visually refreshed here.
 */
public void refresh() {
    GraphicalViewer viewer = getGraphicalViewer();
    if (viewer != null && viewer.getContents() != null) {
        viewer.getContents().refresh();
        for (Object child : viewer.getContents().getChildren()) {
            ((EditPart) child).refresh();
        }
    }
    refreshPartName();
}

/**
 * Recomputes the editor title from the element the diagram represents.
 */
public void refreshPartName() {
    if (fModel == null) {
        return;
    }
    Object represents = fModel.getRepresents();
    if (represents instanceof NonRootModelElement) {
        setName(GraphicsUtil
                .getCanvasEditorTitle((NonRootModelElement) represents));
    }
}
/**
 * Tears down this editor: unhooks all listeners, unregisters the
 * instance, disposes the shared diagram font when this is the last open
 * diagram editor, and disposes all tooltip helpers.
 */
@Override
public void dispose() {
    // stop receiving workbench part lifecycle events before super teardown
    getEditorSite().getPage().removePartListener(this);
    super.dispose();
    // detach the editor listener from both model roots
    Ooaofooa.getDefaultInstance()
            .removeModelChangeListener(fEditorListener);
    Ooaofgraphics.getDefaultInstance().removeModelChangeListener(
            fEditorListener);
    if (getTransactionManager() != null)
        getTransactionManager().removeTransactionListener(fEditorListener);
    fInstances.remove(this);
    if (fInstances.isEmpty()) {
        // if we are the last diagram editor, then dispose
        // the font used -- but never the shared system font
        if (diagramFont != null
                && diagramFont != PlatformUI.getWorkbench().getDisplay()
                        .getSystemFont()) {
            diagramFont.dispose();
            diagramFont = null;
        }
    }
    // release every tooltip helper owned by this editor
    Collection<BPToolTipHelper> helpers = tooltipMap.values();
    for (BPToolTipHelper helper : helpers) {
        helper.dispose();
    }
    // this editor listens for font-preference changes; unhook it
    JFaceResources.getFontRegistry().removeListener(this);
}
/**
 * Lazily creates (or re-creates after disposal) the shared diagram font
 * from the {@code org.xtuml.bp.canvas.font} preference, falling back to
 * the workbench system font when the preference is missing or malformed.
 *
 * @return the shared diagram font; never null
 */
public static Font getFont() {
    if (diagramFont == null || diagramFont.isDisposed()) {
        String prefFont = JFacePreferences.getPreferenceStore().getString(
                "org.xtuml.bp.canvas.font");//$NON-NLS-1$
        // the stored value is expected to be "<FontData string>;...";
        // guard against a missing ';' which previously threw
        // StringIndexOutOfBoundsException
        int separator = prefFont == null ? -1 : prefFont.indexOf(';');
        if (prefFont == null || prefFont.equals("") || separator < 0) {
            // something strange has happened, should not occur
            // but to be safe set a default
            diagramFont = PlatformUI.getWorkbench().getDisplay()
                    .getSystemFont();
        } else {
            FontData prefFontData = new FontData(
                    prefFont.substring(0, separator));
            diagramFont = new Font(PlatformUI.getWorkbench().getDisplay(),
                    prefFontData);
        }
    }
    return diagramFont;
}
/**
 * Reacts to this editor (or its multi-page parent) gaining focus by
 * synchronizing selection, tracking the active canvas, and re-showing
 * tooltips.
 */
@Override
public void partActivated(IWorkbenchPart part) {
    if (part != this && part != getParentEditor()) {
        return;
    }
    // synchronize the graphical selection with the core selection
    ((GraphicalSelectionManager) getGraphicalViewer()
            .getSelectionManager()).synchronizeSelectionToCore();
    // additionally reset the current canvas variable in the Gr_c class
    Gr_c.cur_canvas = (Canvas) getCanvas();
    // ask every tooltip helper to redisplay its tooltip if possible
    for (BPToolTipHelper helper : tooltipMap.values()) {
        helper.activate();
    }
}

@Override
public void partBroughtToTop(IWorkbenchPart part) {
    // intentionally empty
}

@Override
public void partClosed(IWorkbenchPart part) {
    // intentionally empty
}

/**
 * Hides any visible tooltips when this editor loses focus.
 */
@Override
public void partDeactivated(IWorkbenchPart part) {
    if (part == this || part == getParentEditor()) {
        for (BPToolTipHelper helper : tooltipMap.values()) {
            helper.deactivate();
        }
    }
}

@Override
public void partOpened(IWorkbenchPart part) {
    // intentionally empty
}
/**
 * Asks every open {@code ModelEditor} on the active page to repaint its
 * diagram.
 */
public static void redrawAll() {
    IEditorReference[] references = PlatformUI.getWorkbench()
            .getActiveWorkbenchWindow().getActivePage()
            .getEditorReferences();
    for (IEditorReference reference : references) {
        // false: do not force-restore editors that are not materialized
        IEditorPart editor = reference.getEditor(false);
        if (editor instanceof ModelEditor) {
            ((ModelEditor) editor).getGraphicalEditor().redraw();
        }
    }
}

/**
 * Erases the root figure and forces a synchronous repaint pass.
 */
private void redraw() {
    GraphicalEditPart root =
            (GraphicalEditPart) getGraphicalViewer().getRootEditPart();
    root.getFigure().erase();
    root.getFigure().getUpdateManager().performUpdate();
}
/**
 * Updates the selection-dependent actions, but only when this editor is
 * the active editor (directly, or as the active page of a parent
 * {@code ModelEditor}).
 */
@Override
public void selectionChanged(IWorkbenchPart part, ISelection selection) {
    IEditorPart active = getSite().getPage().getActiveEditor();
    if (this.equals(active)) {
        updateActions(getSelectionActions());
    } else if (active instanceof ModelEditor
            && this.equals(((ModelEditor) active).getActivePart())) {
        updateActions(getSelectionActions());
    }
}
/**
 * Looks up a palette tool inside the drawer whose label matches the
 * given tool-set name.
 *
 * @return the tool, or null when no match exists
 */
public AbstractTool getTool(String toolSet, String toolName) {
    for (Object entry : fPaletteRoot.getChildren()) {
        if (!(entry instanceof PaletteDrawer)) {
            continue;
        }
        PaletteDrawer drawer = (PaletteDrawer) entry;
        if (drawer.getLabel().equals(toolSet)) {
            AbstractTool found = getTool(toolName, drawer.getChildren());
            if (found != null) {
                return found;
            }
        }
    }
    return null;
}

/**
 * Sets the diagram model backing this editor.
 */
public void setModel(Model_c model) {
    fModel = model;
}

/**
 * Looks up a palette tool anywhere in the palette.
 */
public AbstractTool getTool(String toolName) {
    return getTool(toolName, fPaletteRoot.getChildren());
}

/**
 * Depth-first search of a palette subtree: recurses into drawers and
 * matches leaf tool entries by label.
 *
 * @return a freshly created tool, or null when no entry matches
 */
public AbstractTool getTool(String toolName, List<?> children) {
    for (Object entry : children) {
        if (entry instanceof PaletteDrawer) {
            AbstractTool found =
                    getTool(toolName, ((PaletteDrawer) entry).getChildren());
            if (found != null) {
                return found;
            }
        } else if (entry instanceof ToolEntry) {
            ToolEntry toolEntry = (ToolEntry) entry;
            if (toolEntry.getLabel().equals(toolName)) {
                return (AbstractTool) toolEntry.createTool();
            }
        }
    }
    return null;
}
/**
 * Runs the registered delete action.
 */
public void doDelete() {
    getDeleteAction().run();
}

/**
 * Zooms so the whole diagram fits the viewport.
 */
public void zoomAll() {
    ((ZoomManager) getAdapter(ZoomManager.class))
            .setZoomAsText(ZoomManager.FIT_ALL);
}

/**
 * Steps the zoom level out by one notch.
 */
public void zoomOut() {
    ((ZoomManager) getAdapter(ZoomManager.class)).zoomOut();
}

/**
 * Zooms so the current selection fits the viewport.
 */
public void zoomSelected() {
    ((GraphicalZoomManager) getAdapter(ZoomManager.class))
            .setZoomAsText(GraphicalZoomManager.FIT_SELECTION);
}

/**
 * Widens visibility of the protected GEF edit-domain accessor.
 */
public DefaultEditDomain getDomain() {
    return super.getEditDomain();
}
/**
 * Swaps the diagram model backing this editor: (re)initializes the
 * palette tools, repoints the viewer, zoom manager and edit domain at
 * the new model, and records it as the current canvas model.
 *
 * @param newModel the replacement diagram model
 */
public void updateModel(Model_c newModel) {
    newModel.Initializetools();
    GraphicalViewer viewer = (GraphicalViewer) getAdapter(GraphicalViewer.class);
    setModel(newModel);
    // track the active model for the Gr_c bridge
    Gr_c.cur_model = newModel;
    GraphicalZoomManager zoomManager = (GraphicalZoomManager) getAdapter(ZoomManager.class);
    zoomManager.setModel(newModel);
    viewer.setContents(newModel);
    GraphicalEditDomain domain = (GraphicalEditDomain) getEditDomain();
    domain.setElement(newModel.getRepresents());
}
/**
 * Collects every visible graphical edit part in the diagram: top-level
 * shapes (and, when the model has a container symbol, the container's
 * children), their text children, connectors, connectors attached to
 * connectors, and free-floating connectors — then filters out parts
 * whose parent figure is hidden.
 *
 * @param root              the viewer whose contents are walked
 * @param modelHasContainer true when the diagram's symbols are nested
 *                          under a single container symbol
 * @return all visible edit parts of the diagram
 */
@SuppressWarnings("unchecked")
public static List<GraphicalEditPart> getAllSymbols(GraphicalViewer root,
        boolean modelHasContainer) {
    List<GraphicalEditPart> symbols = new ArrayList<GraphicalEditPart>();
    symbols.addAll(root.getContents().getChildren());
    if (modelHasContainer) {
        // symbols live one level down, inside the container symbol
        symbols.addAll(((GraphicalEditPart) root.getContents()
                .getChildren().get(0)).getChildren());
    }
    ArrayList<GraphicalEditPart> shapeText = new ArrayList<GraphicalEditPart>();
    ArrayList<GraphicalEditPart> allConnections = new ArrayList<GraphicalEditPart>();
    for (GraphicalEditPart child : symbols) {
        AbstractGraphicalEditPart childPart = (AbstractGraphicalEditPart) child;
        allConnections.addAll(childPart.getSourceConnections());
        shapeText.addAll(child.getChildren());
    }
    symbols.addAll(shapeText);
    // add connections that start on connections
    for (Object child : allConnections) {
        AbstractGraphicalEditPart childPart = (AbstractGraphicalEditPart) child;
        symbols.addAll(childPart.getSourceConnections());
        // add all text for the source connections
        for (Object sourceCon : childPart.getSourceConnections()) {
            AbstractGraphicalEditPart source = (AbstractGraphicalEditPart) sourceCon;
            symbols.addAll(source.getChildren());
        }
        // add all text for this connector
        symbols.addAll(childPart.getChildren());
    }
    // add any free floating connectors
    allConnections.addAll(((GraphicalEditPart) root.getRootEditPart()
            .getContents()).getSourceConnections());
    for (Object child : ((GraphicalEditPart) root.getRootEditPart()
            .getContents()).getSourceConnections()) {
        AbstractGraphicalEditPart childPart = (AbstractGraphicalEditPart) child;
        // add all text for the free floating connectors
        allConnections.addAll(childPart.getChildren());
    }
    symbols.addAll(allConnections);
    // filter out all hidden elements (parent figure not visible)
    List<GraphicalEditPart> filteredList = new ArrayList<GraphicalEditPart>();
    for (GraphicalEditPart part : symbols) {
        if (part.getFigure().getParent().isVisible()) {
            filteredList.add(part);
        }
    }
    return filteredList;
}
/**
 * Generates an {@link Image} of the contents of this editor.
 *
 * @param fitRectangle the area to capture; when null, the extent of all
 *                     visible symbols is used
 * @return a newly allocated image of the diagram; the caller is
 *         responsible for disposing it
 */
public Image getDiagramImage(Rectangle fitRectangle) {
    if (fitRectangle == null)
        // default to the bounding box of everything visible on the diagram
        fitRectangle = GraphicalZoomManager
                .getExtentRectangle(getAllSymbols(getGraphicalViewer(),
                        getModel().Hascontainersymbol()));
    Image image = new Image(Display.getDefault(), fitRectangle.width,
            fitRectangle.height);
    PrintDiagramOperation.printImage(image, getGraphicalViewer(),
            fitRectangle, getModel().Hascontainersymbol(),
            PrintDiagramOperation.FIT_PAGE);
    return image;
}
/**
 * Widens visibility of the protected GEF viewer accessor.
 */
public GraphicalViewer getGraphicalViewer() {
    return super.getGraphicalViewer();
}
/**
 * Reads the persisted scroll position for this diagram from the dialog
 * settings.
 *
 * @return the stored viewport location, or {@code (-1, -1)} when no
 *         location was ever stored
 */
public Point getPersistedViewportLocation() {
    IDialogSettings dialogSettings = Activator.getDefault()
            .getDialogSettings();
    try {
        // getInt() throws NumberFormatException when the key was never
        // set (or was set incorrectly); the stored values are ints, so
        // use int locals rather than the previous float round-trip
        int viewportX = dialogSettings.getInt(getViewportXValueKey());
        int viewportY = dialogSettings.getInt(getViewportYValueKey());
        return new Point(viewportX, viewportY);
    } catch (NumberFormatException e) {
        // value was never set or was incorrectly set; do not bother
        // logging an error, just return the "unset" sentinel
        return new Point(-1, -1);
    }
}
/**
 * Persists the zoom level keyed by this diagram's identity.
 */
private void storeZoomValue(double zoom) {
    Activator.getDefault().getDialogSettings().put(getZoomKey(), zoom);
}

/**
 * Persists the current scroll position of the canvas viewport.
 */
public void storeViewportLocation() {
    Point location = ((FigureCanvas) getCanvas()).getViewport()
            .getViewLocation().getCopy();
    IDialogSettings settings = Activator.getDefault().getDialogSettings();
    settings.put(getViewportXValueKey(), location.x);
    settings.put(getViewportYValueKey(), location.y);
}

private String getViewportXValueKey() {
    return getDiagramId() + ":" + DIAGRAM_VIEWPORT_X;
}

private String getViewportYValueKey() {
    return getDiagramId() + ":" + DIAGRAM_VIEWPORT_Y;
}

private String getZoomKey() {
    return getDiagramId() + ":" + DIAGRAM_ZOOM;
}

/**
 * @return an identifier unique per represented element and diagram type
 */
private String getDiagramId() {
    return Cl_c.Getooa_idfrominstance(getModel().getRepresents())
            .toString() + "-" + getModel().getOoa_type();
}

/**
 * Convenience method for manually setting the persisted scroll values.
 */
public void setViewportLocationInStorage(int x, int y) {
    IDialogSettings settings = Activator.getDefault().getDialogSettings();
    settings.put(getViewportXValueKey(), x);
    settings.put(getViewportYValueKey(), y);
}
/**
 * Reacts to changes of the {@code org.xtuml.bp.canvas.font} preference:
 * drops the cached diagram font when the stored font data differs, then
 * pushes the (possibly re-created) font to the canvas and refreshes.
 */
@Override
public void propertyChange(PropertyChangeEvent event) {
    if (event.getProperty().equals("org.xtuml.bp.canvas.font")) {
        if (diagramFont != null) {
            FontData fontData = diagramFont.getFontData()[0];
            // dispose the cached font only when the preference actually
            // changed; getFont() will lazily rebuild it
            if (!((FontData[]) event.getNewValue())[0].equals(fontData)) {
                diagramFont.dispose();
                diagramFont = null;
            }
            // update the figure canvas with the new font
            ((FigureCanvas) getCanvas()).setFont(getFont());
            refresh();
        }
    }
}
/**
 * @return the grid spacing configured in the core plug-in preferences
 */
public static int getGridSpacing() {
    return CorePlugin.getDefault().getPreferenceStore()
            .getInt(BridgePointPreferencesStore.GRID_SPACING);
}
}
| |
/*
* Copyright 2017 Axway Software
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.axway.ats.action;
import com.axway.ats.common.PublicAtsApi;
import com.axway.ats.config.AbstractConfigurator;
import com.axway.ats.config.exceptions.ConfigurationException;
import com.axway.ats.config.exceptions.NoSuchPropertyException;
import com.axway.ats.core.CoreLibraryConfigurator;
import com.axway.ats.core.utils.StringUtils;
/**
* The Action Library configuration class
*/
@PublicAtsApi
public class ActionLibraryConfigurator extends AbstractConfigurator {
private static final String PROPERTIES_FILE_NAME = "/ats.actionlibrary.properties";
//the configuration keys
private static final String PACKAGE_LOADER_DEFAULT_BOX_KEY = "actionlibrary.packageloader.defaultbox";
private static final String FILE_TRANSFER_VERBOSE_MODE = "actionlibrary.filetransfer.verbosemode";
private static final String FILE_TRANSFER_CONNECTION_TIMEOUT = "actionlibrary.filetransfer.connection.timeout";
private static final String FILE_TRANSFER_CONNECTION_INTERVAL = "actionlibrary.filetransfer.connection.interval";
private static final String FILE_TRANSFER_CONNECTION_INITIAL_DELAY = "actionlibrary.filetransfer.connection.initialdelay";
private static final String FILE_SYSTEM_COPY_FILE_START_PORT = "actionlibrary.filesystem.copyfile.start.port";
private static final String FILE_SYSTEM_COPY_FILE_END_PORT = "actionlibrary.filesystem.copyfile.end.port";
private static final String MAIL_HOST = "actionlibrary.mail.host";
private static final String MAIL_PORT = "actionlibrary.mail.port";
private static final String MAIL_TIMEOUT = "actionlibrary.mail.timeout";
private static final String MAIL_LOCAL_ADDRESS = "actionlibrary.mail.localaddress";
private static final String MAIL_SESSION_DEBUG_MODE = "actionlibrary.mail.session.debug";
private static final String MIMEPACKAGE_MAX_NESTED_LEVEL = "actionlibrary.mimepackage.maxnestedlevel";
private static final String FILE_SNAPSHOT_CHECK_MODIFICATION_TIME = "actionlibrary.filesnapshot.check.modificationtime";
private static final String FILE_SNAPSHOT_CHECK_SIZE = "actionlibrary.filesnapshot.check.size";
private static final String FILE_SNAPSHOT_CHECK_MD5 = "actionlibrary.filesnapshot.check.md5";
private static final String FILE_SNAPSHOT_CHECK_PERMISSIONS = "actionlibrary.filesnapshot.check.permissions";
private static final String FILE_SNAPSHOT_SUPPORT_HIDDEN = "actionlibrary.filesnapshot.support.hidden";
private static final String FILE_SNAPSHOT_CHECK_PROPERTIES_FILES_CONTENT = "actionlibrary.filesnapshot.check.properties.content";
private static final String FILE_SNAPSHOT_CHECK_XML_FILES_CONTENT = "actionlibrary.filesnapshot.check.xml.content";
private static final String FILE_SNAPSHOT_CHECK_INI_FILES_CONTENT = "actionlibrary.filesnapshot.check.ini.content";
private static final String FILE_SNAPSHOT_CHECK_TEXT_FILES_CONTENT = "actionlibrary.filesnapshot.check.text.content";
private static final String FILE_SNAPSHOT_INI_FILES_SECTION_START_CHAR = "actionlibrary.filesnapshot.ini.section.start.char";
private static final String FILE_SNAPSHOT_INI_FILES_COMMENT_START_CHAR = "actionlibrary.filesnapshot.ini.comment.start.char";
private static final String FILE_SNAPSHOT_INI_FILES_DELIMETER_CHAR = "actionlibrary.filesnapshot.ini.delimeter.char";
private static final String FILE_SNAPSHOT_XML_FILE_EXTENSIONS = "actionlibrary.filesnapshot.xml.file.types";
private static final String FILE_SNAPSHOT_PROPERTIES_FILE_EXTENSIONS = "actionlibrary.filesnapshot.properties.file.types";
private static final String FILE_SNAPSHOT_INI_FILE_EXTENSIONS = "actionlibrary.filesnapshot.ini.file.types";
private static final String FILE_SNAPSHOT_TEXT_FILE_EXTENSIONS = "actionlibrary.filesnapshot.text.file.types";
private static final String REST_DEFAULT_REQUEST_MEDIA_TYPE = "actionlibrary.rest.default.request.media.type";
private static final String REST_DEFAULT_REQUEST_MEDIA_CHARSET = "actionlibrary.rest.default.request.media.charset";
private static final String REST_DEFAULT_RESPONSE_MEDIA_TYPE = "actionlibrary.rest.default.response.media.type";
private static final String REST_DEFAULT_RESPONSE_MEDIA_CHARSET = "actionlibrary.rest.default.response.media.charset";
private static final String REST_KEEP_REQUEST_MEDIA_TYPE = "actionlibrary.rest.keep.request.media.type";
private static final String REST_KEEP_REQUEST_MEDIA_CHARSET = "actionlibrary.rest.keep.request.media.charset";
private static final String REST_KEEP_RESPONSE_MEDIA_TYPE = "actionlibrary.rest.keep.response.media.type";
private static final String REST_KEEP_RESPONSE_MEDIA_CHARSET = "actionlibrary.rest.keep.response.media.charset";
private static final String REST_KEEP_RESOURCE_PATH = "actionlibrary.rest.keep.response.resource.path";
private static final String REST_KEEP_REQUEST_HEADERS = "actionlibrary.rest.keep.request.headers";
private static final String REST_KEEP_REQUEST_PARAMETERS = "actionlibrary.rest.keep.request.parameters";
private static final String HTTP_KEEP_REQUEST_HEADERS = "actionlibrary.http.keep.request.headers";
private static final String HTTP_KEEP_REQUEST_PARAMETERS = "actionlibrary.http.keep.request.parameters";
private static final String HTTP_KEEP_REQUEST_BODY = "actionlibrary.http.keep.request.body";
public FileSnapshots snapshots = new FileSnapshots();
/**
* The singleton instance for this configurator
*/
private static ActionLibraryConfigurator instance;
/**
 * Registers the given classpath resource as this configurator's
 * configuration source.
 *
 * @param configurationSource classpath location of the properties file
 */
private ActionLibraryConfigurator( String configurationSource ) {

    super();

    //add the resource to the repository
    addConfigFileFromClassPath(configurationSource, true, false);
}

/**
 * @return the singleton configurator instance; note that the
 *         configuration data is reloaded on every call
 */
@PublicAtsApi
public static synchronized ActionLibraryConfigurator getInstance() {

    if (instance == null) {
        instance = new ActionLibraryConfigurator(PROPERTIES_FILE_NAME);
    }
    instance.reloadData();
    return instance;
}
/**
 * Get the default message box to be used when loading packages
 *
 * @return the default message box name
 */
@PublicAtsApi
public String getDefaultMessagesBox() {

    return getProperty(PACKAGE_LOADER_DEFAULT_BOX_KEY);
}

/**
 * Set the default message box to be used when loading packages.
 * The value is stored as a temporary (run-scoped) property.
 *
 * @param defaultMessageBox the default message box name
 */
@PublicAtsApi
public void setDefaultMessagesBox( String defaultMessageBox ) {

    setTempProperty(PACKAGE_LOADER_DEFAULT_BOX_KEY, defaultMessageBox);
}

/**
 * Get the file transfer verbose mode
 *
 * @return true when verbose file-transfer logging is enabled
 */
@PublicAtsApi
public boolean getFileTransferVerboseMode() {

    return getBooleanProperty(FILE_TRANSFER_VERBOSE_MODE);
}

/**
 * Set the file transfer verbose mode
 *
 * @param verboseMode if true, then log verbosity is increased
 */
@PublicAtsApi
public void setFileTransferVerboseMode( boolean verboseMode ) {

    setTempProperty(FILE_TRANSFER_VERBOSE_MODE, Boolean.toString(verboseMode));
}
/**
 * Get the file transfer connection timeout
 *
 * @return the file transfer connection timeout
 */
@PublicAtsApi
public long getFileTransferConnectionTimeout() {

    return getLongProperty(FILE_TRANSFER_CONNECTION_TIMEOUT);
}

/**
 * Set the file transfer connection timeout
 *
 * @param timeout the file transfer connection timeout
 */
@PublicAtsApi
public void setFileTransferConnectionTimeout( long timeout ) {

    setTempProperty(FILE_TRANSFER_CONNECTION_TIMEOUT, Long.toString(timeout));
}

/**
 * Set the file transfer connection retry interval
 *
 * @param interval the interval between connection attempts
 */
@PublicAtsApi
public void setFileTransferConnectionInterval( long interval ) {

    setTempProperty(FILE_TRANSFER_CONNECTION_INTERVAL, Long.toString(interval));
}

/**
 * Get the file transfer connection retry interval
 *
 * @return the interval between connection attempts
 */
@PublicAtsApi
public long getFileTransferConnectionInterval() {

    return getLongProperty(FILE_TRANSFER_CONNECTION_INTERVAL);
}

/**
 * Set the file transfer connection initial delay
 *
 * @param delay the delay before the first connection attempt
 */
@PublicAtsApi
public void setFileTransferConnectionInitialDelay( long delay ) {

    setTempProperty(FILE_TRANSFER_CONNECTION_INITIAL_DELAY, Long.toString(delay));
}

/**
 * Get the file transfer connection initial delay
 *
 * @return the delay before the first connection attempt
 */
@PublicAtsApi
public long getFileTransferConnectionInitialDelay() {

    return getLongProperty(FILE_TRANSFER_CONNECTION_INITIAL_DELAY);
}
/**
 * Set the default HTTPS encryption protocols, for example "TLSv1.2".
 * You can specify more than one by using ',' as a delimiter.
 * Delegates to the core library configurator.
 *
 * @param protocols the encryption protocols
 */
@PublicAtsApi
public void setFileTransferDefaultHttpsEncryptionProtocols( String protocols ) {

    CoreLibraryConfigurator.getInstance().setFileTransferDefaultHttpsEncryptionProtocols(protocols);
}

/**
 * Get the default HTTPS encryption protocols
 *
 * @return the encryption protocols
 */
@PublicAtsApi
public String getFileTransferDefaultHttpsEncryptionProtocols() {

    return CoreLibraryConfigurator.getInstance().getFileTransferDefaultHttpsEncryptionProtocols();
}

/**
 * Set the default HTTPS encryption cipher suites.
 * You can specify more than one by using ',' as a delimiter.
 * Delegates to the core library configurator.
 *
 * @param cipherSuites the cipher suites
 */
@PublicAtsApi
public void setFileTransferDefaultHttpsCipherSuites( String cipherSuites ) {

    CoreLibraryConfigurator.getInstance().setFileTransferDefaultHttpsCipherSuites(cipherSuites);
}

/**
 * Get the default HTTPS encryption cipher suites
 *
 * @return the cipher suites
 */
@PublicAtsApi
public String getFileTransferDefaultHttpsCipherSuites() {

    return CoreLibraryConfigurator.getInstance().getFileTransferDefaultHttpsCipherSuites();
}

/**
 * Set the starting point (lowest port number)
 * to try to allocate for non-local file system copy operations
 *
 * @param startPort starting range port
 */
@PublicAtsApi
public void setCopyFileStartPort( int startPort ) {

    setTempProperty(FILE_SYSTEM_COPY_FILE_START_PORT, Integer.toString(startPort));
}

/**
 * Get the starting point (lowest port number) for non-local file system copy operations
 *
 * @return starting range port, as the stored String value
 */
@PublicAtsApi
public String getCopyFileStartPort() {

    return getProperty(FILE_SYSTEM_COPY_FILE_START_PORT);
}

/**
 * Set the ending point (highest port number)
 * to try to allocate for non-local file system copy operations
 *
 * @param endPort ending range port
 */
@PublicAtsApi
public void setCopyFileEndPort( int endPort ) {

    setTempProperty(FILE_SYSTEM_COPY_FILE_END_PORT, Integer.toString(endPort));
}

/**
 * Get the ending point (highest port number) for non-local file system copy operations
 *
 * @return ending range port, as the stored String value
 */
@PublicAtsApi
public String getCopyFileEndPort() {

    return getProperty(FILE_SYSTEM_COPY_FILE_END_PORT);
}
/**
 * Set the mail(SMTP) Server
 *
 * @param mailHost the mail Server
 */
@PublicAtsApi
public void setMailHost( String mailHost ) {

    setTempProperty(MAIL_HOST, mailHost);
}

/**
 * Get the mail(SMTP) Server
 *
 * @return the mail Server
 */
@PublicAtsApi
public String getMailHost() {

    return getProperty(MAIL_HOST);
}

/**
 * Set the port of the mail Server
 *
 * @param mailPort the port of the mail Server
 */
@PublicAtsApi
public void setMailPort( long mailPort ) {

    setTempProperty(MAIL_PORT, Long.toString(mailPort));
}

/**
 * Get the port of the mail Server
 *
 * @return the port of the mail Server
 */
@PublicAtsApi
public long getMailPort() {

    return getLongProperty(MAIL_PORT);
}

/**
 * Set the mail sending timeout.
 * It is used for example when sending a mail over SMTP.
 *
 * @param mailTimeout the mail send timeout
 */
@PublicAtsApi
public void setMailTimeout( long mailTimeout ) {

    setTempProperty(MAIL_TIMEOUT, Long.toString(mailTimeout));
}

/**
 * Set the mail local host address. This is the local address to bind to when creating the SMTP socket.
 * Defaults to the address picked by the Socket class.
 * Should not normally need to be set, but useful with multi-homed hosts where it's important to pick a particular local address to bind to.
 *
 * @param localAddress the local address
 */
@PublicAtsApi
public void setMailLocalAddress( String localAddress ) {

    setTempProperty(MAIL_LOCAL_ADDRESS, localAddress);
}

/**
 * Get the mail local address
 *
 * @return the mail local address
 */
@PublicAtsApi
public String getMailLocalAddress() {

    return getProperty(MAIL_LOCAL_ADDRESS);
}

/**
 * Get the mail sending timeout.
 * It is used for example when sending a message over SMTP.
 *
 * @return the mail sending timeout
 */
@PublicAtsApi
public long getMailTimeout() {

    return getLongProperty(MAIL_TIMEOUT);
}
/**
* Set mail session in debug mode
*
* @param debugMode the mail session debug mode
*/
@PublicAtsApi
public void setMailSessionDebugMode(
boolean debugMode ) {
setTempProperty(MAIL_SESSION_DEBUG_MODE, Boolean.toString(debugMode));
}
/**
* Get if the mail session is in debug mode
*
* @return whether the mail session is in debug mode
*/
@PublicAtsApi
public boolean getMailSessionDebugMode() {
return getBooleanProperty(MAIL_SESSION_DEBUG_MODE);
}
/**
* Get the level of nested packages we parse when loading a MIME package
*
* @return the maximum nested level
*/
@PublicAtsApi
public int getMimePackageMaxNestedLevel() {
return getIntegerProperty(MIMEPACKAGE_MAX_NESTED_LEVEL);
}
/**
* Set the level of nested packages we parse when loading a MIME package
*
* @param maxNestedLevel the max nested level
*/
@PublicAtsApi
public void setMimePackageMaxNestedLevel(
int maxNestedLevel ) {
setTempProperty(MIMEPACKAGE_MAX_NESTED_LEVEL, Integer.toString(maxNestedLevel));
}
// ---------------------------------------------------------------------------
// REST/HTTP client configuration. Each getter/setter pair maps onto a single
// configuration property. The "keep" flags presumably control whether the
// corresponding request/response attribute is retained between consecutive
// invocations of the client — confirm against the REST client implementation.
// ---------------------------------------------------------------------------
/** @return the default media type used for REST request bodies */
@PublicAtsApi
public String getRestDefaultRequestMediaType() {
return getProperty(REST_DEFAULT_REQUEST_MEDIA_TYPE);
}
/** Sets the default media type used for REST request bodies. */
@PublicAtsApi
public void setRestDefaultRequestMediaType(
String mediaType ) {
setTempProperty(REST_DEFAULT_REQUEST_MEDIA_TYPE, mediaType);
}
/** @return the default charset used for REST request bodies */
@PublicAtsApi
public String getRestDefaultRequestMediaCharset() {
return getProperty(REST_DEFAULT_REQUEST_MEDIA_CHARSET);
}
/** Sets the default charset for REST request bodies. (The parameter holds a charset despite its {@code mediaType} name.) */
@PublicAtsApi
public void setRestDefaultRequestMediaCharset(
String mediaType ) {
setTempProperty(REST_DEFAULT_REQUEST_MEDIA_CHARSET, mediaType);
}
/** @return the default media type expected for REST responses */
@PublicAtsApi
public String getRestDefaultResponseMediaType() {
return getProperty(REST_DEFAULT_RESPONSE_MEDIA_TYPE);
}
/** Sets the default media type expected for REST responses. */
@PublicAtsApi
public void setRestDefaultResponseMediaType(
String mediaType ) {
setTempProperty(REST_DEFAULT_RESPONSE_MEDIA_TYPE, mediaType);
}
/** @return the default charset expected for REST responses */
@PublicAtsApi
public String getRestDefaultResponseMediaCharset() {
return getProperty(REST_DEFAULT_RESPONSE_MEDIA_CHARSET);
}
/** Sets the default charset for REST responses. (The parameter holds a charset despite its {@code mediaType} name.) */
@PublicAtsApi
public void setRestDefaultResponseMediaCharset(
String mediaType ) {
setTempProperty(REST_DEFAULT_RESPONSE_MEDIA_CHARSET, mediaType);
}
/** @return whether the REST request media type is kept */
@PublicAtsApi
public boolean getRestKeepRequestMediaType() {
return getBooleanProperty(REST_KEEP_REQUEST_MEDIA_TYPE);
}
/** Sets whether the REST request media type is kept. */
@PublicAtsApi
public void setRestKeepRequestMediaType(
boolean keepRequestMediaType ) {
setTempProperty(REST_KEEP_REQUEST_MEDIA_TYPE, Boolean.toString(keepRequestMediaType));
}
/** @return whether the REST request media charset is kept */
@PublicAtsApi
public boolean getRestKeepRequestMediaCharset() {
return getBooleanProperty(REST_KEEP_REQUEST_MEDIA_CHARSET);
}
/** Sets whether the REST request media charset is kept. */
@PublicAtsApi
public void setRestKeepRequestMediaCharset(
boolean keepRequestMediaCharset ) {
setTempProperty(REST_KEEP_REQUEST_MEDIA_CHARSET, Boolean.toString(keepRequestMediaCharset));
}
/** @return whether the REST response media type is kept */
@PublicAtsApi
public boolean getRestKeepResponseMediaType() {
return getBooleanProperty(REST_KEEP_RESPONSE_MEDIA_TYPE);
}
/** Sets whether the REST response media type is kept. */
@PublicAtsApi
public void setRestKeepResponseMediaType(
boolean keepResponseMediaType ) {
setTempProperty(REST_KEEP_RESPONSE_MEDIA_TYPE, Boolean.toString(keepResponseMediaType));
}
/** @return whether the REST response media charset is kept */
@PublicAtsApi
public boolean getRestKeepResponseMediaCharset() {
return getBooleanProperty(REST_KEEP_RESPONSE_MEDIA_CHARSET);
}
/** Sets whether the REST response media charset is kept. */
@PublicAtsApi
public void setRestKeepResponseMediaCharset(
boolean keepResponseMediaCharset ) {
setTempProperty(REST_KEEP_RESPONSE_MEDIA_CHARSET, Boolean.toString(keepResponseMediaCharset));
}
/** @return whether the REST resource path is kept */
@PublicAtsApi
public boolean getRestKeepResourcePath() {
return getBooleanProperty(REST_KEEP_RESOURCE_PATH);
}
/** Sets whether the REST resource path is kept. */
@PublicAtsApi
public void setRestKeepResourcePath(
boolean keepResourcePath ) {
setTempProperty(REST_KEEP_RESOURCE_PATH, Boolean.toString(keepResourcePath));
}
/** @return whether REST request headers are kept */
@PublicAtsApi
public boolean getRestKeepRequestHeaders() {
return getBooleanProperty(REST_KEEP_REQUEST_HEADERS);
}
/** Sets whether REST request headers are kept. */
@PublicAtsApi
public void setRestKeepRequestHeaders(
boolean keepRequestHeaders ) {
setTempProperty(REST_KEEP_REQUEST_HEADERS, Boolean.toString(keepRequestHeaders));
}
/** @return whether REST request parameters are kept */
@PublicAtsApi
public boolean getRestKeepRequestParameters() {
return getBooleanProperty(REST_KEEP_REQUEST_PARAMETERS);
}
/** Sets whether REST request parameters are kept. */
@PublicAtsApi
public void setRestKeepRequestParameters(
boolean keepRequestParameters ) {
setTempProperty(REST_KEEP_REQUEST_PARAMETERS, Boolean.toString(keepRequestParameters));
}
/** @return whether HTTP request headers are kept */
@PublicAtsApi
public boolean getHttpKeepRequestHeaders() {
return getBooleanProperty(HTTP_KEEP_REQUEST_HEADERS);
}
/** Sets whether HTTP request headers are kept. */
@PublicAtsApi
public void setHttpKeepRequestHeaders(
boolean keepRequestHeaders ) {
setTempProperty(HTTP_KEEP_REQUEST_HEADERS, Boolean.toString(keepRequestHeaders));
}
/** @return whether HTTP request parameters are kept */
@PublicAtsApi
public boolean getHttpKeepRequestParameters() {
return getBooleanProperty(HTTP_KEEP_REQUEST_PARAMETERS);
}
/** Sets whether HTTP request parameters are kept. */
@PublicAtsApi
public void setHttpKeepRequestParameters(
boolean keepRequestParameters ) {
setTempProperty(HTTP_KEEP_REQUEST_PARAMETERS, Boolean.toString(keepRequestParameters));
}
/** @return whether the HTTP request body is kept */
@PublicAtsApi
public boolean getHttpKeepRequestBody() {
return getBooleanProperty(HTTP_KEEP_REQUEST_BODY);
}
/** Sets whether the HTTP request body is kept. */
@PublicAtsApi
public void setHttpKeepRequestBody(
boolean keepRequestBody ) {
setTempProperty(HTTP_KEEP_REQUEST_BODY, Boolean.toString(keepRequestBody));
}
/**
 * Settings for file snapshots.
 *
 * All boolean "check" getters fall back to {@code true} when the property is
 * not set, i.e. every check is enabled by default. Setters go through
 * {@code setTempProperty}, so the values presumably apply to the current run
 * only — confirm against the enclosing configurator.
 */
public class FileSnapshots {

    /** @return whether file modification time is checked; enabled by default */
    @PublicAtsApi
    public boolean getCheckModificationTime() {

        return getBooleanOrDefault(FILE_SNAPSHOT_CHECK_MODIFICATION_TIME, true);
    }

    /** Sets whether file modification time is checked. */
    @PublicAtsApi
    public void setCheckModificationTime( boolean checkModificationTime ) {

        setTempProperty(FILE_SNAPSHOT_CHECK_MODIFICATION_TIME,
                        Boolean.toString(checkModificationTime));
    }

    /** @return whether file size is checked; enabled by default */
    @PublicAtsApi
    public boolean getCheckFileSize() {

        return getBooleanOrDefault(FILE_SNAPSHOT_CHECK_SIZE, true);
    }

    /** Sets whether file size is checked. */
    @PublicAtsApi
    public void setCheckFileSize( boolean checkSize ) {

        setTempProperty(FILE_SNAPSHOT_CHECK_SIZE, Boolean.toString(checkSize));
    }

    /** @return whether file MD5 is checked; enabled by default */
    @PublicAtsApi
    public boolean getCheckFileMd5() {

        return getBooleanOrDefault(FILE_SNAPSHOT_CHECK_MD5, true);
    }

    /** Sets whether file MD5 is checked. */
    @PublicAtsApi
    public void setCheckFileMd5( boolean checkMd5 ) {

        setTempProperty(FILE_SNAPSHOT_CHECK_MD5, Boolean.toString(checkMd5));
    }

    /** @return whether file permissions are checked; enabled by default */
    @PublicAtsApi
    public boolean getCheckFilePermissions() {

        return getBooleanOrDefault(FILE_SNAPSHOT_CHECK_PERMISSIONS, true);
    }

    /** Sets whether file permissions are checked. */
    @PublicAtsApi
    public void setCheckFilePermissions( boolean checkPermissions ) {

        setTempProperty(FILE_SNAPSHOT_CHECK_PERMISSIONS, Boolean.toString(checkPermissions));
    }

    /** @return whether hidden files are supported; enabled by default */
    @PublicAtsApi
    public boolean getSupportHiddenFiles() {

        return getBooleanOrDefault(FILE_SNAPSHOT_SUPPORT_HIDDEN, true);
    }

    /** Sets whether hidden files are supported. */
    @PublicAtsApi
    public void setSupportHiddenFiles( boolean supportHiddenFiles ) {

        setTempProperty(FILE_SNAPSHOT_SUPPORT_HIDDEN, Boolean.toString(supportHiddenFiles));
    }

    /** @return whether the content of Properties files is checked; enabled by default */
    @PublicAtsApi
    public boolean getCheckPropertiesFilesContent() {

        return getBooleanOrDefault(FILE_SNAPSHOT_CHECK_PROPERTIES_FILES_CONTENT, true);
    }

    /** Sets whether the content of Properties files is checked. */
    @PublicAtsApi
    public void setCheckPropertiesFilesContent( boolean checkPropertiesFilesContent ) {

        setTempProperty(FILE_SNAPSHOT_CHECK_PROPERTIES_FILES_CONTENT,
                        Boolean.toString(checkPropertiesFilesContent));
    }

    /** @return whether the content of XML files is checked; enabled by default */
    @PublicAtsApi
    public boolean getCheckXmlFilesContent() {

        return getBooleanOrDefault(FILE_SNAPSHOT_CHECK_XML_FILES_CONTENT, true);
    }

    /** Sets whether the content of XML files is checked. */
    @PublicAtsApi
    public void setCheckXmlFilesContent( boolean checkXmlFilesContent ) {

        setTempProperty(FILE_SNAPSHOT_CHECK_XML_FILES_CONTENT,
                        Boolean.toString(checkXmlFilesContent));
    }

    /** @return whether the content of INI files is checked; enabled by default */
    @PublicAtsApi
    public boolean getCheckIniFilesContent() {

        return getBooleanOrDefault(FILE_SNAPSHOT_CHECK_INI_FILES_CONTENT, true);
    }

    /** Sets whether the content of INI files is checked. */
    @PublicAtsApi
    public void setCheckIniFilesContent( boolean checkIniFilesContent ) {

        setTempProperty(FILE_SNAPSHOT_CHECK_INI_FILES_CONTENT,
                        Boolean.toString(checkIniFilesContent));
    }

    /** @return whether the content of Text files is checked; enabled by default */
    @PublicAtsApi
    public boolean getCheckTextFilesContent() {

        return getBooleanOrDefault(FILE_SNAPSHOT_CHECK_TEXT_FILES_CONTENT, true);
    }

    /** Sets whether the content of Text files is checked. */
    @PublicAtsApi
    public void setCheckTextFilesContent( boolean checkTextFilesContent ) {

        setTempProperty(FILE_SNAPSHOT_CHECK_TEXT_FILES_CONTENT,
                        Boolean.toString(checkTextFilesContent));
    }

    /** @return the character that starts an INI file section; defaults to '[' */
    @PublicAtsApi
    public char getIniFilesStartSectionChar() {

        return getCharOrDefault(FILE_SNAPSHOT_INI_FILES_SECTION_START_CHAR, '[');
    }

    /** Sets the character that starts an INI file section. */
    @PublicAtsApi
    public void setIniFilesStartSectionChar( char startSectionChar ) {

        setTempProperty(FILE_SNAPSHOT_INI_FILES_SECTION_START_CHAR,
                        Character.toString(startSectionChar));
    }

    /** @return the character that starts an INI file comment; defaults to '#' */
    @PublicAtsApi
    public char getIniFilesStartCommentChar() {

        return getCharOrDefault(FILE_SNAPSHOT_INI_FILES_COMMENT_START_CHAR, '#');
    }

    /** Sets the character that starts an INI file comment. */
    @PublicAtsApi
    public void setIniFilesStartCommentChar( char startCommentChar ) {

        setTempProperty(FILE_SNAPSHOT_INI_FILES_COMMENT_START_CHAR,
                        Character.toString(startCommentChar));
    }

    /** @return the INI file key/value delimiter character; defaults to '=' */
    @PublicAtsApi
    public char getIniFilesDelimeterChar() {

        return getCharOrDefault(FILE_SNAPSHOT_INI_FILES_DELIMETER_CHAR, '=');
    }

    /** Sets the INI file key/value delimiter character. */
    @PublicAtsApi
    public void setIniFilesDelimeterChar( char delimeterChar ) {

        setTempProperty(FILE_SNAPSHOT_INI_FILES_DELIMETER_CHAR, Character.toString(delimeterChar));
    }

    /**
     * @return comma-separated file extensions for files treated as Properties files,
     *         or an empty string when none are configured
     */
    public String getPropertiesFileExtensions() {

        return defaultToEmpty(getOptionalProperty(FILE_SNAPSHOT_PROPERTIES_FILE_EXTENSIONS));
    }

    /**
     * Set file extensions that will be treated as Properties files.
     * Default extension is '.properties'
     *
     * @param extensions new extensions; must be non-empty, and no entry may be
     *                   null, empty or contain a ','
     * @throws ConfigurationException on invalid input
     */
    public void setPropertiesFileExtensions( String[] extensions ) {

        setTempProperty(FILE_SNAPSHOT_PROPERTIES_FILE_EXTENSIONS,
                        joinExtensions(extensions, false));
    }

    /**
     * @return comma-separated file extensions for files treated as XML files,
     *         or an empty string when none are configured.
     *         FIX: previously this getter alone could return null; it is now
     *         normalized to "" for consistency with its sibling getters.
     */
    public String getXmlFileExtensions() {

        return defaultToEmpty(getOptionalProperty(FILE_SNAPSHOT_XML_FILE_EXTENSIONS));
    }

    /**
     * Set file extensions that will be treated as XML files.
     * Default extension is '.xml'
     *
     * @param extensions new extensions; must be non-empty, and no entry may be
     *                   null, empty or contain a ','
     * @throws ConfigurationException on invalid input
     */
    public void setXmlFileExtensions( String[] extensions ) {

        setTempProperty(FILE_SNAPSHOT_XML_FILE_EXTENSIONS, joinExtensions(extensions, false));
    }

    /**
     * @return comma-separated file extensions for files treated as INI files,
     *         or an empty string when none are configured
     */
    public String getIniFileExtensions() {

        return defaultToEmpty(getOptionalProperty(FILE_SNAPSHOT_INI_FILE_EXTENSIONS));
    }

    /**
     * Set file extensions that will be treated as INI files.
     * Default extension is '.ini'
     *
     * @param extensions new extensions; must be non-empty, and no entry may be
     *                   null, empty or contain a ','. Entries are trimmed and
     *                   lower-cased before being stored.
     * @throws ConfigurationException on invalid input
     */
    public void setIniFileExtensions( String... extensions ) {

        setTempProperty(FILE_SNAPSHOT_INI_FILE_EXTENSIONS, joinExtensions(extensions, true));
    }

    /**
     * @return comma-separated file extensions for files treated as Text files,
     *         or an empty string when none are configured
     */
    public String getTextFileExtensions() {

        return defaultToEmpty(getOptionalProperty(FILE_SNAPSHOT_TEXT_FILE_EXTENSIONS));
    }

    /**
     * Set file extensions that will be treated as Text files.
     * Default extension is '.txt'
     *
     * @param extensions new extensions; must be non-empty, and no entry may be
     *                   null, empty or contain a ','. Entries are trimmed and
     *                   lower-cased before being stored.
     * @throws ConfigurationException on invalid input
     */
    public void setTextFileExtensions( String... extensions ) {

        setTempProperty(FILE_SNAPSHOT_TEXT_FILE_EXTENSIONS, joinExtensions(extensions, true));
    }

    /** Reads a boolean property, falling back to a default when it is not set. */
    private boolean getBooleanOrDefault( String key, boolean defaultValue ) {

        try {
            return getBooleanProperty(key);
        } catch (NoSuchPropertyException nspe) {
            return defaultValue;
        }
    }

    /** Reads a char property, falling back to a default when it is not set. */
    private char getCharOrDefault( String key, char defaultValue ) {

        try {
            return getCharProperty(key);
        } catch (NoSuchPropertyException nspe) {
            return defaultValue;
        }
    }

    /** Normalizes a possibly missing property value to an empty string. */
    private String defaultToEmpty( String value ) {

        return StringUtils.isNullOrEmpty(value)
                                                ? ""
                                                : value;
    }

    /**
     * Validates and joins file extensions into a comma-separated list.
     *
     * @param extensions the extensions to join; must be non-empty, and no entry
     *                   may be null, empty or contain a ','
     * @param normalize  when true, entries are trimmed and lower-cased (the
     *                   behavior of the INI/Text setters)
     * @return the comma-separated list
     * @throws ConfigurationException on invalid input
     */
    private String joinExtensions( String[] extensions, boolean normalize ) {

        if (extensions == null || extensions.length == 0) {
            // FIX: an empty array previously crashed with an obscure
            // StringIndexOutOfBoundsException from substring(0, -1)
            throw new ConfigurationException("At least one file extension must be supplied");
        }
        StringBuilder extensionsList = new StringBuilder();
        for (String extension : extensions) {
            if (StringUtils.isNullOrEmpty(extension) || extension.contains(",")) {
                // FIX: the message previously ended with a stray ',' instead of
                // closing the quote around the offending value
                throw new ConfigurationException("File types cannot be empty nor can contain the ',' character. You have supplied '"
                                                 + extension + "'");
            }
            extensionsList.append(normalize
                                            ? extension.trim().toLowerCase()
                                            : extension)
                          .append(',');
        }
        // drop the trailing comma
        return extensionsList.substring(0, extensionsList.length() - 1);
    }
}
/** No-op override — this configuration class has nothing to reload. */
@Override
protected void reloadData() {
// nothing to do here
}
}
| |
/*
Copyright 2014-2016 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apple.uikit;
import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSCoder;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.foundation.protocol.NSCoding;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
// Machine-generated MOE (Multi-OS Engine) binding for UIKit's UIBarButtonItemGroup.
// Do not hand-edit: every member is annotated @Generated and maps 1:1 onto an
// Objective-C selector via @Selector.
@Generated
@Library("UIKit")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class UIBarButtonItemGroup extends NSObject implements NSCoding {
static {
NatJ.register();
}
@Generated
protected UIBarButtonItemGroup(Pointer peer) {
super(peer);
}
// --- Standard NSObject class-method bindings (generated boilerplate) ---
@Generated
@Selector("accessInstanceVariablesDirectly")
public static native boolean accessInstanceVariablesDirectly();
@Generated
@Owned
@Selector("alloc")
public static native UIBarButtonItemGroup alloc();
@Owned
@Generated
@Selector("allocWithZone:")
public static native UIBarButtonItemGroup allocWithZone(VoidPtr zone);
@Generated
@Selector("automaticallyNotifiesObserversForKey:")
public static native boolean automaticallyNotifiesObserversForKey(String key);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:")
public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
@Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
@Mapped(ObjCObjectMapper.class) Object anArgument);
@Generated
@Selector("classFallbacksForKeyedArchiver")
public static native NSArray<String> classFallbacksForKeyedArchiver();
@Generated
@Selector("classForKeyedUnarchiver")
public static native Class classForKeyedUnarchiver();
@Generated
@Selector("debugDescription")
public static native String debugDescription_static();
@Generated
@Selector("description")
public static native String description_static();
@Generated
@Selector("hash")
@NUInt
public static native long hash_static();
@Generated
@Selector("instanceMethodForSelector:")
@FunctionPtr(name = "call_instanceMethodForSelector_ret")
public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);
@Generated
@Selector("instanceMethodSignatureForSelector:")
public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);
@Generated
@Selector("instancesRespondToSelector:")
public static native boolean instancesRespondToSelector(SEL aSelector);
@Generated
@Selector("isSubclassOfClass:")
public static native boolean isSubclassOfClass(Class aClass);
@Generated
@Selector("keyPathsForValuesAffectingValueForKey:")
public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);
// "new" is a Java keyword, hence the generated new_objc name for ObjC's +new
@Generated
@Owned
@Selector("new")
public static native UIBarButtonItemGroup new_objc();
@Generated
@Selector("resolveClassMethod:")
public static native boolean resolveClassMethod(SEL sel);
@Generated
@Selector("resolveInstanceMethod:")
public static native boolean resolveInstanceMethod(SEL sel);
@Generated
@Selector("setVersion:")
public static native void setVersion_static(@NInt long aVersion);
@Generated
@Selector("superclass")
public static native Class superclass_static();
@Generated
@Selector("version")
@NInt
public static native long version_static();
// --- UIBarButtonItemGroup-specific instance API ---
/**
 * The bar button items associated with this group. Changing these items will affect the bar displaying these items without needing to re-set the groups that are in that bar. Any UIBarButtonItems that are already in group will be removed from that group.
 */
@Generated
@Selector("barButtonItems")
public native NSArray<? extends UIBarButtonItem> barButtonItems();
@Generated
@Selector("encodeWithCoder:")
public native void encodeWithCoder(NSCoder coder);
@Generated
@Selector("init")
public native UIBarButtonItemGroup init();
/**
 * Create a new bar button item group with the given items. When bar button item layout is done, either the group's barButtonItems or its representativeItem is displayed (if it exists).
 */
@Generated
@Selector("initWithBarButtonItems:representativeItem:")
public native UIBarButtonItemGroup initWithBarButtonItemsRepresentativeItem(
NSArray<? extends UIBarButtonItem> barButtonItems, UIBarButtonItem representativeItem);
@Generated
@Selector("initWithCoder:")
public native UIBarButtonItemGroup initWithCoder(NSCoder coder);
/**
 * Returns YES if the representativeItem of this group is currently being displayed, rather than its barButtonItems.
 */
@Generated
@Selector("isDisplayingRepresentativeItem")
public native boolean isDisplayingRepresentativeItem();
/**
 * In order to display as many items as possible, bars that support UIBarButtonItemGroup may choose to collapse items associated with groups to the representativeItem specified by the group.
 * A bar will only collapse groups that have a representativeItem set, but may still choose to use an alternate presentation of these items.
 * A UIBarButtonItem may only be either the representativeItem or a member of the barButtonItems of a single UIBarButtonItemGroup and may only represent a single group.
 * If the representativeItem has an action, then that action will be invoked, otherwise the bar will present a standard UI to allow selection of the barButtonItems in the representedItem's group.
 */
@Generated
@Selector("representativeItem")
public native UIBarButtonItem representativeItem();
/**
 * The bar button items associated with this group. Changing these items will affect the bar displaying these items without needing to re-set the groups that are in that bar. Any UIBarButtonItems that are already in group will be removed from that group.
 */
@Generated
@Selector("setBarButtonItems:")
public native void setBarButtonItems(NSArray<? extends UIBarButtonItem> value);
/**
 * In order to display as many items as possible, bars that support UIBarButtonItemGroup may choose to collapse items associated with groups to the representativeItem specified by the group.
 * A bar will only collapse groups that have a representativeItem set, but may still choose to use an alternate presentation of these items.
 * A UIBarButtonItem may only be either the representativeItem or a member of the barButtonItems of a single UIBarButtonItemGroup and may only represent a single group.
 * If the representativeItem has an action, then that action will be invoked, otherwise the bar will present a standard UI to allow selection of the barButtonItems in the representedItem's group.
 */
@Generated
@Selector("setRepresentativeItem:")
public native void setRepresentativeItem(UIBarButtonItem value);
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.engine.index ;
import static org.apache.jena.reasoner.rulesys.Util.makeIntNode ;
import static org.junit.Assert.assertEquals ;
import static org.junit.Assert.assertTrue ;
import static org.junit.Assert.assertFalse ;
import static org.junit.Assert.fail ;
import java.util.ArrayList ;
import java.util.Collections ;
import java.util.LinkedHashSet ;
import java.util.List ;
import java.util.Map ;
import java.util.Set ;
import org.apache.jena.sparql.core.Var ;
import org.apache.jena.sparql.engine.QueryIterator ;
import org.apache.jena.sparql.engine.binding.Binding ;
import org.apache.jena.sparql.engine.binding.BindingHashMap ;
import org.apache.jena.sparql.engine.index.HashIndexTable ;
import org.apache.jena.sparql.engine.index.IndexFactory ;
import org.apache.jena.sparql.engine.index.IndexTable ;
import org.apache.jena.sparql.engine.index.LinearIndex ;
import org.apache.jena.sparql.engine.index.HashIndexTable.Key ;
import org.apache.jena.sparql.engine.index.HashIndexTable.MissingBindingException ;
import org.apache.jena.sparql.engine.iterator.QueryIterPlainWrapper ;
import org.junit.Test ;
import org.junit.Before ;
/**
* Tests the {@link org.apache.jena.sparql.engine.index.HashIndexTable} and
* {@link org.apache.jena.sparql.engine.index.LinearIndex} classes. Also tests
* that the {@link org.apache.jena.sparql.engine.index.IndexFactory} instantiates
* the correct type of index depending on the data.
*
* Contribution from Paul Gearon
*/
public class TestIndexTable {
// the three test variables ?a ?b ?c
private Var[] vars ;
// sets of vars with different iteration orders
private Set<Var> order1 ;
private Set<Var> order2 ;
// bindings where every var is bound (full data)
private List<Binding> fData ;
// bindings where some vars are unbound (partial data)
private List<Binding> pData ;
@Before
public void setup()
{
vars = new Var[] { Var.alloc("a"), Var.alloc("b"), Var.alloc("c") } ;
// order1 iterates a,b,c; order2 iterates c,b,a
order1 = new LinkedHashSet<>() ;
order2 = new LinkedHashSet<>() ;
for ( int i = 0 ; i < vars.length ; i++ )
{
order1.add(vars[i]) ;
order2.add(vars[vars.length - i - 1]) ;
}
fData = new ArrayList<>() ;
pData = new ArrayList<>() ;
for ( int i = 10 ; i <= 100 ; i += 10 )
{
BindingHashMap bindingFull = new BindingHashMap() ;
BindingHashMap bindingPart = new BindingHashMap() ;
for ( int b = 0 ; b < vars.length ; b++ )
{
bindingFull.add(vars[b], makeIntNode(i + b)) ; // 10,11,12 - 20,21,22 - 30,31,32 ... 100,101,102
if ( (i + b) % 7 != 0 ) bindingPart.add(vars[b], makeIntNode(i + b)) ; // skips 21, 42, 70, 91
}
fData.add(bindingFull) ;
pData.add(bindingPart) ;
}
}
// HashIndexTable requires fully bound rows; partial data must make the
// factory fall back to LinearIndex, and direct construction must fail with
// a MissingBindingException carrying the var mapping and the rows seen so far.
@Test
public void testHashIndexTableConstruction() throws Exception
{
new HashIndexTable(order1, fullData()) ;
assertTrue(IndexFactory.createIndex(order1, fullData()) instanceof HashIndexTable) ;
assertTrue(IndexFactory.createIndex(order1, partData()) instanceof LinearIndex) ;
try {
new HashIndexTable(order1, partData()) ;
fail("Index built without failure on partial bindings") ;
} catch (MissingBindingException e)
{
// check that the expected mapping occurred
Map<Var,Integer> map = e.getMap() ;
for ( int i = 0 ; i < vars.length ; i++ )
{
assertEquals(Integer.valueOf(i), map.get(vars[i])) ;
}
// check for rows of {a=10,b=11,c=12}, {a=20,c=22}
// (only the two rows consumed before the failure at b=21 are present)
Set<Key> data = e.getData() ;
assertEquals(2, data.size()) ;
for ( Key key: data )
{
Binding b = LinearIndex.toBinding(key, map) ;
if ( b.size() == 3 )
{
for ( int i = 0 ; i < vars.length ; i++ )
assertEquals(b.get(vars[i]), makeIntNode(10 + i)) ;
} else
{
assertEquals(b.get(vars[0]), makeIntNode(20)) ;
assertEquals(b.get(vars[2]), makeIntNode(22)) ;
}
}
}
}
@Test
public void testHashIndexTableData() throws Exception
{
// test twice with different internal mappings
testTableData(new HashIndexTable(order1, fullData())) ;
testTableData(new HashIndexTable(order2, fullData())) ;
}
@Test
public void testLinearIndexTableData()
{
// test twice with different internal mappings
testTableData(IndexFactory.createIndex(order1, partData())) ;
testTableData(IndexFactory.createIndex(order2, partData())) ;
// test the linear index with full data, since this should also work
Set<Key> emptyKeys = Collections.emptySet() ;
Map<Var,Integer> emptyMapping = Collections.emptyMap() ;
testTableData(new LinearIndex(order1, fullData(), emptyKeys, emptyMapping)) ;
testTableData(new LinearIndex(order2, fullData(), emptyKeys, emptyMapping)) ;
// construction directly from part data should also work
testTableData(new LinearIndex(order1, partData(), emptyKeys, emptyMapping)) ;
testTableData(new LinearIndex(order2, partData(), emptyKeys, emptyMapping)) ;
}
// Shared assertions: compatibility checks that must hold for any index built
// from fData or pData, regardless of the index implementation.
private void testTableData(IndexTable index)
{
// positive test for matching
for ( Binding b: fData )
assertTrue(index.containsCompatibleWithSharedDomain(b)) ;
assertTrue(index.containsCompatibleWithSharedDomain(binding("abcd", 10, 11, 12, 13))) ;
assertTrue(index.containsCompatibleWithSharedDomain(binding("ab", 10, 11))) ;
assertTrue(index.containsCompatibleWithSharedDomain(binding("bc", 11, 12))) ;
assertTrue(index.containsCompatibleWithSharedDomain(binding("ac", 10, 12))) ;
assertTrue(index.containsCompatibleWithSharedDomain(binding("a", 10))) ;
assertTrue(index.containsCompatibleWithSharedDomain(binding("ab", 70, 71))) ;
assertTrue(index.containsCompatibleWithSharedDomain(binding("bc", 71, 72))) ;
assertTrue(index.containsCompatibleWithSharedDomain(binding("ac", 70, 72))) ;
assertTrue(index.containsCompatibleWithSharedDomain(binding("a", 80))) ; // a=70 won't match for partData
// negative test for matching
assertFalse(index.containsCompatibleWithSharedDomain(binding("abc", 10, 11, 11))) ;
assertFalse(index.containsCompatibleWithSharedDomain(binding("d", 10))) ;
assertFalse(index.containsCompatibleWithSharedDomain(binding("abc", 10, 21, 32))) ;
assertFalse(index.containsCompatibleWithSharedDomain(binding("xyz", 10, 11, 12))) ;
}
private QueryIterator fullData() { return new QueryIterPlainWrapper(fData.iterator()) ; }
private QueryIterator partData() { return new QueryIterPlainWrapper(pData.iterator()) ; }
/**
 * A convenience method that creates a binding of Vars with single letter names bound to integers.
 * @param varNames A string of variable names. The length must match the number of integers to bind to.
 * @param ints The values of the integers to be bound to the variables.
 * @return a binding of each single-letter Var to the corresponding integer node
 */
private static Binding binding(String varNames, Integer... ints)
{
// note: plain assert — only enforced when the JVM runs with -ea
assert varNames.length() == ints.length ;
BindingHashMap b = new BindingHashMap() ;
for ( int s = 0 ; s < varNames.length() ; s++ )
b.add(Var.alloc(varNames.substring(s, s + 1)), makeIntNode(ints[s])) ;
return b ;
}
}
| |
// Copyright 2017 Twitter. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.twitter.heron.apiserver.resources;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.FormParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.twitter.heron.apiserver.Constants;
import com.twitter.heron.apiserver.actions.ActionFactory;
import com.twitter.heron.apiserver.actions.ActionFactoryImpl;
import com.twitter.heron.apiserver.actions.ActionType;
import com.twitter.heron.apiserver.actions.Keys;
import com.twitter.heron.apiserver.utils.ConfigUtils;
import com.twitter.heron.apiserver.utils.FileHelper;
import com.twitter.heron.apiserver.utils.Logging;
import com.twitter.heron.apiserver.utils.Utils;
import com.twitter.heron.common.basics.DryRunFormatType;
import com.twitter.heron.common.basics.FileUtils;
import com.twitter.heron.common.basics.Pair;
import com.twitter.heron.scheduler.dryrun.DryRunResponse;
import com.twitter.heron.scheduler.dryrun.SubmitDryRunResponse;
import com.twitter.heron.scheduler.dryrun.UpdateDryRunResponse;
import com.twitter.heron.scheduler.utils.DryRunRenders;
import com.twitter.heron.spi.common.Config;
import com.twitter.heron.spi.common.Key;
/**
 * JAX-RS resource implementing the topology lifecycle endpoints of the Heron
 * API server: submit, activate, deactivate, restart, update and kill.
 *
 * All endpoints produce JSON. Resource paths follow the pattern
 * {@code /topologies/{cluster}/{role}/{environment}/{name}}.
 */
@Path("/topologies")
public class TopologyResource extends HeronResource {
  private static final Logger LOG = LoggerFactory.getLogger(TopologyResource.class);

  // Name of the tar.gz package assembled from the uploaded topology artifacts.
  private static final String TOPOLOGY_TAR_GZ_FILENAME = "topology.tar.gz";

  // 422 Unprocessable Entity has no constant in the JAX-RS Response.Status enum.
  private static final int HTTP_UNPROCESSABLE_ENTITY_CODE = 422;

  // Multipart form field names recognized by submit().
  private static final String FORM_KEY_NAME = "name";
  private static final String FORM_KEY_CLUSTER = "cluster";
  private static final String FORM_KEY_ROLE = "role";
  private static final String FORM_KEY_ENVIRONMENT = "environment";
  private static final String FORM_KEY_DEFINITION = "definition";
  private static final String FORM_KEY_TOPOLOGY = "topology";
  private static final String FORM_KEY_USER = "user";

  // All well-known submit form fields; any OTHER field in the submit form is
  // treated as a configuration override (see getSubmitOverrides()).
  private static final Set<String> SUBMIT_TOPOLOGY_PARAMS = Collections.unmodifiableSet(
      new HashSet<>(
          Arrays.asList(
              FORM_KEY_NAME,
              FORM_KEY_CLUSTER,
              FORM_KEY_ROLE,
              FORM_KEY_ENVIRONMENT,
              FORM_KEY_DEFINITION,
              FORM_KEY_TOPOLOGY,
              FORM_KEY_USER
          )
      )
  );

  // Fields that must be present in a submit request; environment and user are
  // optional and are defaulted in submit().
  private static final String[] REQUIRED_SUBMIT_TOPOLOGY_PARAMS = {
      FORM_KEY_NAME,
      FORM_KEY_CLUSTER,
      FORM_KEY_ROLE,
      FORM_KEY_DEFINITION,
      FORM_KEY_TOPOLOGY
  };

  // Parameter names for the update endpoint and for dry-run handling.
  private static final String PARAM_COMPONENT_PARALLELISM = "component_parallelism";
  private static final String PARAM_RUNTIME_CONFIG_KEY = "runtime_config";
  private static final String PARAM_DRY_RUN = "dry_run";
  private static final String PARAM_DRY_RUN_FORMAT = "dry_run_format";
  private static final String DEFAULT_DRY_RUN_FORMAT = DryRunFormatType.TABLE.toString();

  // path format /topologies/{cluster}/{role}/{environment}/{name}
  private static final String TOPOLOGY_PATH_FORMAT = "/topologies/%s/%s/%s/%s";

  private final ActionFactory actionFactory = new ActionFactoryImpl();

  /**
   * Submits a topology for execution from a multipart form upload.
   *
   * Validates required fields and the target cluster, stages the uploaded
   * definition and binary into a temporary directory together with the
   * server's sandbox configuration (plus overrides), packages everything into
   * topology.tar.gz and hands it to the submit action. The staging directory
   * is always removed afterwards.
   *
   * @param form multipart submit request
   * @return 201 Created with the topology path on success; 422 on validation
   *         failure; 200 with the rendered plan for dry runs; 500 on error
   * @throws IOException if the temporary staging directory cannot be created
   */
  @POST
  @Consumes(MediaType.MULTIPART_FORM_DATA)
  @Produces(MediaType.APPLICATION_JSON)
  @SuppressWarnings({"IllegalCatch", "JavadocMethod"})
  public Response submit(FormDataMultiPart form) throws IOException {
    // verify that we have all the required params
    final List<String> missingDataKeys =
        verifyKeys(form.getFields().keySet(), REQUIRED_SUBMIT_TOPOLOGY_PARAMS);
    if (!missingDataKeys.isEmpty()) {
      // return error since we are missing required parameters
      final String message = String.format("Validation failed missing required params: %s",
          missingDataKeys.toString());
      return Response.status(HTTP_UNPROCESSABLE_ENTITY_CODE)
          .type(MediaType.APPLICATION_JSON)
          .entity(Utils.createValidationError(message, missingDataKeys))
          .build();
    }
    final String cluster = Forms.getString(form, FORM_KEY_CLUSTER);
    // this server only manages one cluster; reject submits aimed elsewhere
    if (!doesClusterMatch(cluster)) {
      return Response.status(HTTP_UNPROCESSABLE_ENTITY_CODE)
          .type(MediaType.APPLICATION_JSON)
          .entity(Utils.createMessage(String.format("Unknown cluster %s expecting '%s'",
              cluster, getCluster())))
          .build();
    }
    final String topologyName = Forms.getString(form, FORM_KEY_NAME);
    final String role = Forms.getString(form, FORM_KEY_ROLE);
    final String environment =
        Forms.getString(form, FORM_KEY_ENVIRONMENT, Constants.DEFAULT_HERON_ENVIRONMENT);
    // user defaults to the role when not supplied
    final String user = Forms.getString(form, FORM_KEY_USER, role);
    // submit overrides are passed key=value
    final Map<String, String> submitOverrides = getSubmitOverrides(form);
    final String topologyDirectory =
        Files.createTempDirectory(topologyName).toFile().getAbsolutePath();
    try {
      // upload the topology definition file to the topology directory
      final FormDataBodyPart definitionFilePart = form.getField(FORM_KEY_DEFINITION);
      final File topologyDefinitionFile = Forms.uploadFile(definitionFilePart, topologyDirectory);
      // upload the topology binary file to the topology directory
      final FormDataBodyPart topologyFilePart = form.getField(FORM_KEY_TOPOLOGY);
      final File topologyBinaryFile = Forms.uploadFile(topologyFilePart, topologyDirectory);
      // presence of the dry_run field (regardless of value) enables dry-run mode
      final boolean isDryRun = form.getFields().containsKey(PARAM_DRY_RUN);
      // copy configuration files to the sandbox config location
      // topology-dir/<default-heron-sandbox-config>
      FileHelper.copyDirectory(
          Paths.get(getConfigurationDirectory()),
          Paths.get(topologyDirectory, Constants.DEFAULT_HERON_SANDBOX_CONFIG));
      final java.nio.file.Path overridesPath =
          Paths.get(topologyDirectory, Constants.DEFAULT_HERON_SANDBOX_CONFIG,
              Constants.OVERRIDE_FILE);
      // copy override file into topology configuration directory
      FileHelper.copy(Paths.get(getConfigurationOverridePath()), overridesPath);
      // apply submit overrides
      ConfigUtils.applyOverrides(overridesPath, submitOverrides);
      // apply overrides to state manager config
      ConfigUtils.applyOverridesToStateManagerConfig(overridesPath,
          Paths.get(topologyDirectory, Constants.DEFAULT_HERON_SANDBOX_CONFIG,
              Constants.STATE_MANAGER_FILE)
      );
      // create tar file from the contents of the topology directory
      final File topologyPackageFile =
          Paths.get(topologyDirectory, TOPOLOGY_TAR_GZ_FILENAME).toFile();
      FileHelper.createTarGz(topologyPackageFile, FileHelper.getChildren(topologyDirectory));
      // create configs: start from the topology config, then layer request values on top
      Config topologyConfig = ConfigUtils.getTopologyConfig(
          topologyPackageFile.getAbsolutePath(),
          topologyBinaryFile.getName(),
          topologyDefinitionFile.getAbsolutePath());
      List<Pair<String, Object>> val = new LinkedList<>();
      for (Map.Entry<String, Object> entry : topologyConfig.getEntrySet()) {
        val.add(Pair.create(entry.getKey(), entry.getValue()));
      }
      val.addAll(Arrays.asList(
          Pair.create(Key.CLUSTER.value(), cluster),
          Pair.create(Key.TOPOLOGY_NAME.value(), topologyName),
          Pair.create(Key.ROLE.value(), role),
          Pair.create(Key.ENVIRON.value(), environment),
          Pair.create(Key.SUBMIT_USER.value(), user),
          Pair.create(Key.DRY_RUN.value(), isDryRun)
      ));
      final Config config = createConfig(val, submitOverrides);
      // submit the topology
      getActionFactory()
          .createSubmitAction(config,
              topologyPackageFile.getAbsolutePath(),
              topologyBinaryFile.getName(),
              topologyDefinitionFile.getAbsolutePath())
          .execute();
      return Response.created(
          URI.create(String.format(TOPOLOGY_PATH_FORMAT,
              cluster, role, environment, topologyName)))
          .type(MediaType.APPLICATION_JSON)
          .entity(createdResponse(cluster, role, environment, topologyName)).build();
    } catch (SubmitDryRunResponse response) {
      // a dry run aborts the submit by throwing the plan; render it for the client
      return createDryRunResponse(response,
          Forms.getString(form, PARAM_DRY_RUN_FORMAT, DEFAULT_DRY_RUN_FORMAT));
    } catch (Exception ex) {
      LOG.error("error submitting topology {}", topologyName, ex);
      return Response.serverError()
          .type(MediaType.APPLICATION_JSON)
          .entity(Utils.createMessage(ex.getMessage()))
          .build();
    } finally {
      // always clean up the staging directory, success or failure
      FileUtils.deleteDir(topologyDirectory);
    }
  }

  /**
   * Activates a deployed topology.
   *
   * @return 200 with a confirmation message, or 500 with the error message
   */
  @POST
  @Path("/{cluster}/{role}/{environment}/{name}/activate")
  @Produces(MediaType.APPLICATION_JSON)
  @SuppressWarnings("IllegalCatch")
  public Response activate(
      final @PathParam("cluster") String cluster,
      final @PathParam("role") String role,
      final @PathParam("environment") String environment,
      final @PathParam("name") String name) {
    try {
      final Config config = getConfig(cluster, role, environment, name);
      getActionFactory().createRuntimeAction(config, ActionType.ACTIVATE).execute();
      return Response.ok()
          .type(MediaType.APPLICATION_JSON)
          .entity(Utils.createMessage(String.format("%s activated", name)))
          .build();
    } catch (Exception ex) {
      LOG.error("error activating topology {}", name, ex);
      return Response.serverError()
          .type(MediaType.APPLICATION_JSON)
          .entity(Utils.createMessage(ex.getMessage()))
          .build();
    }
  }

  /**
   * Deactivates a running topology.
   *
   * @return 200 with a confirmation message, or 500 with the error message
   */
  @POST
  @Path("/{cluster}/{role}/{environment}/{name}/deactivate")
  @Produces(MediaType.APPLICATION_JSON)
  @SuppressWarnings("IllegalCatch")
  public Response deactivate(
      final @PathParam("cluster") String cluster,
      final @PathParam("role") String role,
      final @PathParam("environment") String environment,
      final @PathParam("name") String name) {
    try {
      final Config config = getConfig(cluster, role, environment, name);
      getActionFactory().createRuntimeAction(config, ActionType.DEACTIVATE).execute();
      return Response.ok()
          .type(MediaType.APPLICATION_JSON)
          .entity(Utils.createMessage(String.format("%s deactivated", name)))
          .build();
    } catch (Exception ex) {
      LOG.error("error deactivating topology {}", name, ex);
      return Response.serverError()
          .type(MediaType.APPLICATION_JSON)
          .entity(Utils.createMessage(ex.getMessage()))
          .build();
    }
  }

  /**
   * Restarts a topology, or a single container of it.
   *
   * @param containerId container to restart; -1 (the default) restarts all
   * @return 200 with a confirmation message, or 500 with the error message
   */
  @POST
  @Path("/{cluster}/{role}/{environment}/{name}/restart")
  @Produces(MediaType.APPLICATION_JSON)
  @SuppressWarnings("IllegalCatch")
  public Response restart(
      final @PathParam("cluster") String cluster,
      final @PathParam("role") String role,
      final @PathParam("environment") String environment,
      final @PathParam("name") String name,
      final @DefaultValue("-1") @FormParam("container_id") int containerId) {
    try {
      final List<Pair<String, Object>> keyValues = new ArrayList<>(
          Arrays.asList(
              Pair.create(Key.CLUSTER.value(), cluster),
              Pair.create(Key.ROLE.value(), role),
              Pair.create(Key.ENVIRON.value(), environment),
              Pair.create(Key.TOPOLOGY_NAME.value(), name),
              Pair.create(Key.TOPOLOGY_CONTAINER_ID.value(), containerId)
          )
      );
      final Config config = createConfig(keyValues);
      getActionFactory().createRuntimeAction(config, ActionType.RESTART).execute();
      return Response.ok()
          .type(MediaType.APPLICATION_JSON)
          .entity(Utils.createMessage(String.format("%s restarted", name)))
          .build();
    } catch (Exception ex) {
      LOG.error("error restarting topology {}", name, ex);
      return Response.serverError()
          .type(MediaType.APPLICATION_JSON)
          .entity(Utils.createMessage(ex.getMessage()))
          .build();
    }
  }

  /**
   * Updates a running topology. Dispatches on which form parameter is present:
   * {@code component_parallelism} or {@code runtime_config}. Exactly one of
   * the two must be supplied, otherwise 422 is returned.
   *
   * @param params form parameters of the update request
   * @return 200 on success, 422 on missing/invalid params, 200 with the
   *         rendered plan for dry runs, 500 on error
   */
  @POST
  @Path("/{cluster}/{role}/{environment}/{name}/update")
  @Produces(MediaType.APPLICATION_JSON)
  @SuppressWarnings({"IllegalCatch", "JavadocMethod"})
  public Response update(
      final @PathParam("cluster") String cluster,
      final @PathParam("role") String role,
      final @PathParam("environment") String environment,
      final @PathParam("name") String name,
      MultivaluedMap<String, String> params) {
    try {
      if (params == null) {
        return Response.status(HTTP_UNPROCESSABLE_ENTITY_CODE)
            .type(MediaType.APPLICATION_JSON)
            .entity(Utils.createMessage("no param"))
            .build();
      } else {
        List<String> components = params.get(PARAM_COMPONENT_PARALLELISM);
        List<String> runtimeConfigs = params.get(PARAM_RUNTIME_CONFIG_KEY);
        if (components != null && !components.isEmpty()) {
          return updateComponentParallelism(cluster, role, environment, name, params, components);
        } else if (runtimeConfigs != null && !runtimeConfigs.isEmpty()) {
          return updateRuntimeConfig(cluster, role, environment, name, params, runtimeConfigs);
        } else {
          return Response.status(HTTP_UNPROCESSABLE_ENTITY_CODE)
              .type(MediaType.APPLICATION_JSON)
              .entity(Utils.createMessage("missing component_parallelism or runtime_config param"))
              .build();
        }
      }
    } catch (UpdateDryRunResponse response) {
      // a dry run aborts the update by throwing the plan; render it for the client
      return createDryRunResponse(response,
          Forms.getFirstOrDefault(params, PARAM_DRY_RUN_FORMAT, DEFAULT_DRY_RUN_FORMAT));
    } catch (Exception ex) {
      LOG.error("error updating topology {}", name, ex);
      return Response.serverError()
          .type(MediaType.APPLICATION_JSON)
          .entity(Utils.createMessage(ex.getMessage()))
          .build();
    }
  }

  /**
   * Executes an UPDATE action that changes component parallelism.
   * The component list is joined comma-separated into a single config value.
   * Remaining form params (minus component_parallelism) are applied as
   * overrides; presence of dry_run turns the call into a dry run.
   */
  protected Response updateComponentParallelism(
      String cluster,
      String role,
      String environment,
      String name,
      MultivaluedMap<String, String> params,
      List<String> components) {
    final List<Pair<String, Object>> keyValues = new ArrayList<>(
        Arrays.asList(
            Pair.create(Key.CLUSTER.value(), cluster),
            Pair.create(Key.ROLE.value(), role),
            Pair.create(Key.ENVIRON.value(), environment),
            Pair.create(Key.TOPOLOGY_NAME.value(), name),
            Pair.create(Keys.PARAM_COMPONENT_PARALLELISM,
                String.join(",", components))
        )
    );
    // has a dry run been requested?
    if (params.containsKey(PARAM_DRY_RUN)) {
      keyValues.add(Pair.create(Key.DRY_RUN.value(), Boolean.TRUE));
    }
    final Set<Pair<String, Object>> overrides = getUpdateOverrides(params);
    // apply overrides if they exist
    if (!overrides.isEmpty()) {
      keyValues.addAll(overrides);
    }
    final Config config = createConfig(keyValues);
    getActionFactory().createRuntimeAction(config, ActionType.UPDATE).execute();
    return Response.ok()
        .type(MediaType.APPLICATION_JSON)
        .entity(Utils.createMessage(String.format("%s updated", name)))
        .build();
  }

  /**
   * Placeholder for runtime-config updates; currently a no-op that reports
   * success without performing any action.
   */
  protected Response updateRuntimeConfig(
      String cluster,
      String role,
      String environment,
      String name,
      MultivaluedMap<String, String> params,
      List<String> runtimeConfigs) {
    // TODO(nwang): Implement runtime config in API server
    return Response.ok()
        .type(MediaType.APPLICATION_JSON)
        .entity(Utils.createMessage(String.format("%s updated", name)))
        .build();
  }

  /**
   * Kills (removes) a topology.
   *
   * @return 200 on success; 404 when the error message indicates the topology
   *         does not exist; 500 for any other failure
   */
  @DELETE
  @Path("/{cluster}/{role}/{environment}/{name}")
  @Produces(MediaType.APPLICATION_JSON)
  @SuppressWarnings("IllegalCatch")
  public Response kill(
      final @PathParam("cluster") String cluster,
      final @PathParam("role") String role,
      final @PathParam("environment") String environment,
      final @PathParam("name") String name) {
    try {
      final Config config = getConfig(cluster, role, environment, name);
      getActionFactory().createRuntimeAction(config, ActionType.KILL).execute();
      return Response.ok()
          .type(MediaType.APPLICATION_JSON)
          .entity(Utils.createMessage(String.format("%s killed", name)))
          .build();
    } catch (Exception ex) {
      LOG.error("error killing topology {}", name, ex);
      final String message = ex.getMessage();
      // map "does not exist" failures to 404; everything else is a 500
      final Response.Status status = message != null && message.contains("does not exist")
          ? Response.Status.NOT_FOUND : Response.Status.INTERNAL_SERVER_ERROR;
      return Response.status(status)
          .type(MediaType.APPLICATION_JSON)
          .entity(Utils.createMessage(message))
          .build();
    }
  }

  // Package-private to allow tests to substitute the factory.
  ActionFactory getActionFactory() {
    return actionFactory;
  }

  // The API server manages a single cluster; comparison is case-insensitive.
  private boolean doesClusterMatch(String cluster) {
    return getCluster().equalsIgnoreCase(cluster);
  }

  /**
   * Returns the subset of {@code requiredKeys} that is absent from
   * {@code keys}; an empty list means validation passed.
   */
  static List<String> verifyKeys(Set<String> keys, String... requiredKeys) {
    final List<String> missingKeys = new ArrayList<>();
    if (requiredKeys != null) {
      for (String key : requiredKeys) {
        if (!keys.contains(key)) {
          missingKeys.add(key);
        }
      }
    }
    return missingKeys;
  }

  // Builds the minimal config identifying one topology instance.
  private Config getConfig(String cluster, String role, String environment, String topologyName) {
    return createConfig(
        Arrays.asList(
            Pair.create(Key.CLUSTER.value(), cluster),
            Pair.create(Key.ROLE.value(), role),
            Pair.create(Key.ENVIRON.value(), environment),
            Pair.create(Key.TOPOLOGY_NAME.value(), topologyName)
        ));
  }

  private Config createConfig(Collection<Pair<String, Object>> keyValues) {
    return createConfig(keyValues, Collections.emptyMap());
  }

  /**
   * Builds a Config: base configuration first, then the given key/values,
   * then string overrides (later puts win), plus the verbosity flag.
   * The result is converted to local or cluster mode based on the cluster name.
   */
  private Config createConfig(Collection<Pair<String, Object>> keyValues,
      Map<String, String> overrides) {
    final Config.Builder builder = Config.newBuilder().putAll(getBaseConfiguration());
    for (Pair<String, Object> keyValue : keyValues) {
      builder.put(keyValue.first, keyValue.second);
    }
    overrides.forEach(builder::put);
    builder.put(Key.VERBOSE, Logging.isVerbose());
    return isLocalMode()
        ? Config.toLocalMode(builder.build()) : Config.toClusterMode(builder.build());
  }

  // "local", "standalone" and "nomad" clusters run in local mode.
  private boolean isLocalMode() {
    return "local".equalsIgnoreCase(getCluster()) || "standalone".equalsIgnoreCase(getCluster())
        || "nomad".equalsIgnoreCase(getCluster());
  }

  // Every submit form field that is not a well-known param becomes an override.
  private static Map<String, String> getSubmitOverrides(FormDataMultiPart form) {
    final Map<String, String> overrides = new HashMap<>();
    for (String key : form.getFields().keySet()) {
      if (!SUBMIT_TOPOLOGY_PARAMS.contains(key)) {
        overrides.put(key, Forms.getString(form, key));
      }
    }
    return overrides;
  }

  // Every update param other than component_parallelism becomes an override
  // (only the first value of a multi-valued param is used).
  private static Set<Pair<String, Object>> getUpdateOverrides(
      MultivaluedMap<String, String> params) {
    final Set<Pair<String, Object>> overrides = new HashSet<>();
    for (String key : params.keySet()) {
      if (!PARAM_COMPONENT_PARALLELISM.equalsIgnoreCase(key)) {
        overrides.add(Pair.create(key, params.getFirst(key)));
      }
    }
    return overrides;
  }

  /**
   * Parses the dry-run render format. Unknown or null values fall back to
   * TABLE; note that valueOf is case-sensitive, so mismatches are caught
   * here and logged rather than propagated.
   */
  @SuppressWarnings("IllegalCatch")
  private static DryRunFormatType getDryRunFormatType(String type) {
    try {
      if (type != null) {
        return DryRunFormatType.valueOf(type);
      }
    } catch (Exception ex) {
      LOG.warn("unknown dry format render type {} defaulting to table", type);
    }
    return DryRunFormatType.TABLE;
  }

  // Renders a submit or update dry-run plan; any other subtype yields a
  // diagnostic string rather than an exception.
  private static String getDryRunResponse(DryRunResponse response, String type) {
    if (response instanceof SubmitDryRunResponse) {
      return DryRunRenders.render((SubmitDryRunResponse) response,
          getDryRunFormatType(type));
    } else if (response instanceof UpdateDryRunResponse) {
      return DryRunRenders.render((UpdateDryRunResponse) response,
          getDryRunFormatType(type));
    }
    return "Unknown dry run response type " + response.getClass().getName();
  }

  // Wraps a rendered dry-run plan into {"response": "..."} JSON with HTTP 200.
  private static Response createDryRunResponse(DryRunResponse response, String type) {
    final String body = new ObjectMapper().createObjectNode()
        .put("response", getDryRunResponse(response, type))
        .toString();
    return Response.ok()
        .type(MediaType.APPLICATION_JSON)
        .entity(body)
        .build();
  }

  // JSON body for a 201 Created response identifying the new topology.
  private static String createdResponse(String cluster, String role, String environment,
      String topologyName) {
    return new ObjectMapper().createObjectNode()
        .put("name", topologyName)
        .put("cluster", cluster)
        .put("role", role)
        .put("environment", environment)
        .toString();
  }
}
| |
package org.renyan.leveldb.util;
import java.io.DataInput;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.GatheringByteChannel;
import java.nio.charset.Charset;
/**
 * A {@link DataInput} / {@link InputStream} view over a {@link Slice} that
 * maintains a read {@code position} advancing with every read.
 *
 * <p>Not thread-safe: {@code position} is mutated without synchronization.
 */
public final class SliceInput extends InputStream implements DataInput
{
    private final Slice slice;
    private int position;

    public SliceInput(Slice slice)
    {
        this.slice = slice;
    }

    /**
     * Returns the {@code position} of this buffer.
     */
    public int position()
    {
        return position;
    }

    /**
     * Sets the {@code position} of this buffer.
     *
     * @throws IndexOutOfBoundsException if the specified {@code position} is
     * less than {@code 0} or
     * greater than {@code this.writerIndex}
     */
    public void setPosition(int position)
    {
        if (position < 0 || position > slice.length()) {
            throw new IndexOutOfBoundsException();
        }
        this.position = position;
    }

    /**
     * Returns {@code true}
     * if and only if {@code available()} is greater
     * than {@code 0}.
     */
    public boolean isReadable()
    {
        return available() > 0;
    }

    /**
     * Returns the number of readable bytes which is equal to
     * {@code (this.slice.length() - this.position)}.
     */
    @Override
    public int available()
    {
        return slice.length() - position;
    }

    @Override
    public boolean readBoolean()
            throws IOException
    {
        return readByte() != 0;
    }

    /**
     * Reads one byte per the {@link InputStream} contract: the value is
     * returned as an unsigned int in {@code [0, 255]}, and {@code -1} is
     * returned at end of stream.
     *
     * <p>FIX: previously this delegated to {@link #readByte()}, which
     * sign-extends bytes {@code >= 0x80} to negative ints and throws
     * {@link IndexOutOfBoundsException} at EOF — both violations of the
     * {@code InputStream.read()} contract that break wrapping streams.
     */
    @Override
    public int read()
    {
        if (position == slice.length()) {
            return -1;
        }
        return slice.getByte(position++) & 0xff;
    }

    /**
     * Gets a byte at the current {@code position} and increases
     * the {@code position} by {@code 1} in this buffer.
     *
     * @throws IndexOutOfBoundsException if {@code this.available()} is less than {@code 1}
     */
    public byte readByte()
    {
        if (position == slice.length()) {
            throw new IndexOutOfBoundsException();
        }
        return slice.getByte(position++);
    }

    /**
     * Gets an unsigned byte at the current {@code position} and increases
     * the {@code position} by {@code 1} in this buffer.
     *
     * @throws IndexOutOfBoundsException if {@code this.available()} is less than {@code 1}
     */
    @Override
    public int readUnsignedByte()
    {
        // masking already yields an int in [0, 255]; the old (short) cast was redundant
        return readByte() & 0xff;
    }

    /**
     * Gets a 16-bit short integer at the current {@code position}
     * and increases the {@code position} by {@code 2} in this buffer.
     *
     * @throws IndexOutOfBoundsException if {@code this.available()} is less than {@code 2}
     */
    @Override
    public short readShort()
    {
        short v = slice.getShort(position);
        position += 2;
        return v;
    }

    /**
     * Gets an unsigned 16-bit integer at the current {@code position}
     * and increases the {@code position} by {@code 2} in this buffer.
     *
     * <p>FIX: previously masked with {@code 0xff}, which dropped the high
     * byte and returned values only in {@code [0, 255]}; the
     * {@link DataInput#readUnsignedShort()} contract requires a value in
     * {@code [0, 65535]}.
     */
    @Override
    public int readUnsignedShort()
            throws IOException
    {
        return readShort() & 0xffff;
    }

    /**
     * Gets a 32-bit integer at the current {@code position}
     * and increases the {@code position} by {@code 4} in this buffer.
     *
     * @throws IndexOutOfBoundsException if {@code this.available()} is less than {@code 4}
     */
    @Override
    public int readInt()
    {
        int v = slice.getInt(position);
        position += 4;
        return v;
    }

    /**
     * Gets an unsigned 32-bit integer at the current {@code position}
     * and increases the {@code position} by {@code 4} in this buffer.
     *
     * @throws IndexOutOfBoundsException if {@code this.available()} is less than {@code 4}
     */
    public long readUnsignedInt()
    {
        return readInt() & 0xFFFFFFFFL;
    }

    /**
     * Gets a 64-bit integer at the current {@code position}
     * and increases the {@code position} by {@code 8} in this buffer.
     *
     * @throws IndexOutOfBoundsException if {@code this.available()} is less than {@code 8}
     */
    @Override
    public long readLong()
    {
        long v = slice.getLong(position);
        position += 8;
        return v;
    }

    /**
     * Copies {@code length} bytes starting at the current {@code position}
     * into a fresh array and advances the {@code position}.
     */
    public byte[] readByteArray(int length)
    {
        byte[] value = slice.copyBytes(position, length);
        position += length;
        return value;
    }

    /**
     * Transfers this buffer's data to a newly created buffer starting at
     * the current {@code position} and increases the {@code position}
     * by the number of the transferred bytes (= {@code length}).
     * The returned buffer's {@code position} and {@code writerIndex} are
     * {@code 0} and {@code length} respectively.
     *
     * @param length the number of bytes to transfer
     * @return the newly created buffer which contains the transferred bytes
     * @throws IndexOutOfBoundsException if {@code length} is greater than {@code this.available()}
     */
    public Slice readBytes(int length)
    {
        if (length == 0) {
            return Slices.EMPTY_SLICE;
        }
        Slice value = slice.slice(position, length);
        position += length;
        return value;
    }

    /**
     * Returns a new slice of this buffer's sub-region starting at the current
     * {@code position} and increases the {@code position} by the size
     * of the new slice (= {@code length}).
     *
     * @param length the size of the new slice
     * @return the newly created slice
     * @throws IndexOutOfBoundsException if {@code length} is greater than {@code this.available()}
     */
    public Slice readSlice(int length)
    {
        Slice newSlice = slice.slice(position, length);
        position += length;
        return newSlice;
    }

    @Override
    public void readFully(byte[] destination)
    {
        readBytes(destination);
    }

    /**
     * Transfers this buffer's data to the specified destination starting at
     * the current {@code position} and increases the {@code position}
     * by the number of the transferred bytes (= {@code dst.length}).
     *
     * @throws IndexOutOfBoundsException if {@code dst.length} is greater than {@code this.available()}
     */
    public void readBytes(byte[] destination)
    {
        readBytes(destination, 0, destination.length);
    }

    @Override
    public void readFully(byte[] destination, int offset, int length)
    {
        readBytes(destination, offset, length);
    }

    /**
     * Transfers this buffer's data to the specified destination starting at
     * the current {@code position} and increases the {@code position}
     * by the number of the transferred bytes (= {@code length}).
     *
     * @param destinationIndex the first index of the destination
     * @param length the number of bytes to transfer
     * @throws IndexOutOfBoundsException if the specified {@code destinationIndex} is less than {@code 0},
     * if {@code length} is greater than {@code this.available()}, or
     * if {@code destinationIndex + length} is greater than {@code destination.length}
     */
    public void readBytes(byte[] destination, int destinationIndex, int length)
    {
        slice.getBytes(position, destination, destinationIndex, length);
        position += length;
    }

    /**
     * Transfers this buffer's data into the specified destination slice,
     * starting at the current {@code position}, filling the destination
     * completely, and increases the {@code position} by the number of the
     * transferred bytes.
     *
     * @throws IndexOutOfBoundsException if {@code destination.length()} is greater than
     * {@code this.available()}
     */
    public void readBytes(Slice destination)
    {
        readBytes(destination, destination.length());
    }

    /**
     * Transfers {@code length} bytes of this buffer's data into the beginning
     * of the specified destination slice, starting at the current
     * {@code position}, and increases the {@code position} by {@code length}.
     *
     * <p>FIX: previously this passed {@code destination.length()} as the
     * destination index, which points one past the end of the destination and
     * makes any non-zero-length transfer fail with
     * {@link IndexOutOfBoundsException}; the write must start at index 0.
     *
     * @throws IndexOutOfBoundsException if {@code length} is greater than {@code this.available()} or
     * if {@code length} is greater than {@code destination.length()}
     */
    public void readBytes(Slice destination, int length)
    {
        if (length > destination.length()) {
            throw new IndexOutOfBoundsException();
        }
        readBytes(destination, 0, length);
    }

    /**
     * Transfers this buffer's data to the specified destination starting at
     * the current {@code position} and increases the {@code position}
     * by the number of the transferred bytes (= {@code length}).
     *
     * @param destinationIndex the first index of the destination
     * @param length the number of bytes to transfer
     * @throws IndexOutOfBoundsException if the specified {@code destinationIndex} is less than {@code 0},
     * if {@code length} is greater than {@code this.available()}, or
     * if {@code destinationIndex + length} is greater than
     * {@code destination.capacity}
     */
    public void readBytes(Slice destination, int destinationIndex, int length)
    {
        slice.getBytes(position, destination, destinationIndex, length);
        position += length;
    }

    /**
     * Transfers this buffer's data to the specified destination starting at
     * the current {@code position} until the destination's position
     * reaches its limit, and increases the {@code position} by the
     * number of the transferred bytes.
     *
     * @throws IndexOutOfBoundsException if {@code destination.remaining()} is greater than
     * {@code this.available()}
     */
    public void readBytes(ByteBuffer destination)
    {
        // capture remaining() before the transfer consumes it
        int length = destination.remaining();
        slice.getBytes(position, destination);
        position += length;
    }

    /**
     * Transfers this buffer's data to the specified channel starting at the
     * current {@code position}.
     *
     * @param length the maximum number of bytes to transfer
     * @return the actual number of bytes written out to the specified channel
     * @throws IndexOutOfBoundsException if {@code length} is greater than {@code this.available()}
     * @throws java.io.IOException if the specified channel threw an exception during I/O
     */
    public int readBytes(GatheringByteChannel out, int length)
            throws IOException
    {
        int readBytes = slice.getBytes(position, out, length);
        // advance only by the bytes actually written (channel may write fewer)
        position += readBytes;
        return readBytes;
    }

    /**
     * Transfers this buffer's data to the specified stream starting at the
     * current {@code position}.
     *
     * @param length the number of bytes to transfer
     * @throws IndexOutOfBoundsException if {@code length} is greater than {@code this.available()}
     * @throws java.io.IOException if the specified stream threw an exception during I/O
     */
    public void readBytes(OutputStream out, int length)
            throws IOException
    {
        slice.getBytes(position, out, length);
        position += length;
    }

    /**
     * Skips up to {@code length} bytes, clamped to {@code available()}.
     *
     * @return the number of bytes actually skipped
     */
    @Override
    public int skipBytes(int length)
    {
        length = Math.min(length, available());
        position += length;
        return length;
    }

    /**
     * Returns a slice of this buffer's readable bytes. Modifying the content
     * of the returned buffer or this buffer affects each other's content
     * while they maintain separate indexes and marks. This method is
     * identical to {@code buf.slice(buf.position(), buf.available())}.
     * This method does not modify {@code position} or {@code writerIndex} of
     * this buffer.
     */
    public Slice slice()
    {
        return slice.slice(position, available());
    }

    /**
     * Converts this buffer's readable bytes into a NIO buffer. The returned
     * buffer might or might not share the content with this buffer, while
     * they have separate indexes and marks. This method is identical to
     * {@code buf.toByteBuffer(buf.position(), buf.available())}.
     * This method does not modify {@code position} or {@code writerIndex} of
     * this buffer.
     */
    public ByteBuffer toByteBuffer()
    {
        return slice.toByteBuffer(position, available());
    }

    /**
     * Decodes this buffer's readable bytes into a string with the specified
     * character set name. This method is identical to
     * {@code buf.toString(buf.position(), buf.available(), charsetName)}.
     * This method does not modify {@code position} or {@code writerIndex} of
     * this buffer.
     *
     * @throws java.nio.charset.UnsupportedCharsetException if the specified character set name is not supported by the
     * current VM
     */
    public String toString(Charset charset)
    {
        return slice.toString(position, available(), charset);
    }

    @Override
    public String toString()
    {
        return getClass().getSimpleName() + '(' +
                "ridx=" + position + ", " +
                "cap=" + slice.length() +
                ')';
    }

    //
    // Unsupported operations
    //

    /**
     * Unsupported operation
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public char readChar()
    {
        throw new UnsupportedOperationException();
    }

    /**
     * Unsupported operation
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public float readFloat()
    {
        throw new UnsupportedOperationException();
    }

    /**
     * Unsupported operation
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public double readDouble()
    {
        throw new UnsupportedOperationException();
    }

    /**
     * Unsupported operation
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public String readLine()
    {
        throw new UnsupportedOperationException();
    }

    /**
     * Unsupported operation
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public String readUTF()
    {
        throw new UnsupportedOperationException();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed.near;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.cluster.ClusterTopologyException;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.cluster.ClusterTopologyCheckedException;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheEntryEx;
import org.apache.ignite.internal.processors.cache.GridCacheEntryRemovedException;
import org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate;
import org.apache.ignite.internal.processors.cache.GridCacheSharedContext;
import org.apache.ignite.internal.processors.cache.distributed.GridDistributedTxMapping;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtTxMapping;
import org.apache.ignite.internal.processors.cache.transactions.IgniteInternalTx;
import org.apache.ignite.internal.processors.cache.transactions.IgniteTxEntry;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.transactions.IgniteTxOptimisticCheckedException;
import org.apache.ignite.internal.transactions.IgniteTxRollbackCheckedException;
import org.apache.ignite.internal.transactions.IgniteTxTimeoutCheckedException;
import org.apache.ignite.internal.util.future.GridCompoundFuture;
import org.apache.ignite.internal.util.future.GridFinishedFuture;
import org.apache.ignite.internal.util.future.GridFutureAdapter;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.C1;
import org.apache.ignite.internal.util.typedef.CI1;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.P1;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.lang.IgniteProductVersion;
import org.apache.ignite.lang.IgniteReducer;
import org.apache.ignite.lang.IgniteUuid;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.internal.processors.cache.GridCacheOperation.TRANSFORM;
import static org.apache.ignite.transactions.TransactionState.PREPARED;
import static org.apache.ignite.transactions.TransactionState.PREPARING;
/**
*
*/
public class GridNearOptimisticSerializableTxPrepareFuture extends GridNearOptimisticTxPrepareFutureAdapter {
/** */
public static final IgniteProductVersion SER_TX_SINCE = IgniteProductVersion.fromString("1.5.0");
/** */
@GridToStringExclude
private KeyLockFuture keyLockFut;
/** */
@GridToStringExclude
private ClientRemapFuture remapFut;
/**
 * @param cctx Cache shared context.
 * @param tx Transaction; must be OPTIMISTIC and SERIALIZABLE.
 */
public GridNearOptimisticSerializableTxPrepareFuture(GridCacheSharedContext cctx,
    GridNearTxLocal tx) {
    super(cctx, tx);

    // This future implementation is only valid for optimistic serializable transactions.
    assert tx.optimistic() && tx.serializable() : tx;
}
/** {@inheritDoc} */
@Override protected boolean ignoreFailure(Throwable err) {
    // Any checked Ignite exception (or subclass thereof) is treated as non-fatal here.
    Class<?> errCls = err.getClass();

    return IgniteCheckedException.class.isAssignableFrom(errCls);
}
/** {@inheritDoc} */
@Override public boolean onOwnerChanged(GridCacheEntryEx entry, GridCacheMvccCandidate owner) {
    if (log.isDebugEnabled())
        log.debug("Transaction future received owner changed callback: " + entry);
    // Only near/local entries are lock-tracked by this future; ignore callbacks without an owner.
    if ((entry.context().isNear() || entry.context().isLocal()) && owner != null) {
        IgniteTxEntry txEntry = tx.entry(entry.txKey());
        if (txEntry != null) {
            if (entry.context().isLocal()) {
                GridCacheVersion serReadVer = txEntry.entryReadVersion();
                if (serReadVer != null) {
                    GridCacheContext ctx = entry.context();
                    // Retry loop: the cached entry may be concurrently removed, in which case a
                    // fresh entry is obtained and the serializable read-version check is repeated.
                    while (true) {
                        try {
                            if (!entry.checkSerializableReadVersion(serReadVer)) {
                                // Read/write conflict: record the optimistic failure (first error
                                // wins via CAS) but still report the key as locked below.
                                Object key = entry.key().value(ctx.cacheObjectContext(), false);
                                IgniteTxOptimisticCheckedException err0 =
                                    new IgniteTxOptimisticCheckedException("Failed to prepare transaction, " +
                                        "read/write conflict [key=" + key + ", cache=" + ctx.name() + ']');
                                ERR_UPD.compareAndSet(this, null, err0);
                            }
                            break;
                        }
                        catch (GridCacheEntryRemovedException e) {
                            entry = ctx.cache().entryEx(entry.key(), tx.topologyVersion());
                            txEntry.cached(entry);
                        }
                    }
                }
            }
            // Notify the key-lock future so prepare can proceed once all tracked keys are locked.
            if (keyLockFut != null)
                keyLockFut.onKeyLocked(entry.txKey());
            return true;
        }
    }
    return false;
}
/** {@inheritDoc} */
@Override public boolean onNodeLeft(UUID nodeId) {
    boolean found = false;

    // Fail every mini-future that was mapped to the node that left.
    for (IgniteInternalFuture<?> fut : futures()) {
        if (!isMini(fut))
            continue;

        MiniFuture miniFut = (MiniFuture)fut;

        if (!miniFut.node().id().equals(nodeId))
            continue;

        ClusterTopologyCheckedException e = new ClusterTopologyCheckedException("Remote node left grid: " +
            nodeId);

        e.retryReadyFuture(cctx.nextAffinityReadyFuture(tx.topologyVersion()));

        miniFut.onNodeLeft(e);

        found = true;
    }

    return found;
}
/**
 * @param m Failed mapping, or {@code null} if the error is not tied to a particular node mapping.
 * @param e Error.
 */
private void onError(@Nullable GridDistributedTxMapping m, Throwable e) {
    if (X.hasCause(e, ClusterTopologyCheckedException.class) || X.hasCause(e, ClusterTopologyException.class)) {
        if (tx.onePhaseCommit()) {
            // For one-phase commit a topology change means the backup may still have committed:
            // schedule a backup check instead of failing the transaction outright.
            tx.markForBackupCheck();
            onComplete();
            return;
        }
    }
    // Optimistic conflicts and timeouts invalidate the mapping of the failed node.
    if (e instanceof IgniteTxOptimisticCheckedException || e instanceof IgniteTxTimeoutCheckedException) {
        if (m != null)
            tx.removeMapping(m.node().id());
    }
    // First recorded error wins.
    ERR_UPD.compareAndSet(this, null, e);
    if (keyLockFut != null)
        keyLockFut.onDone(e);
}
/** {@inheritDoc} */
@Override public void onResult(UUID nodeId, GridNearTxPrepareResponse res) {
    if (isDone())
        return;

    // Route the response to the pending mini-future it belongs to, if still pending.
    MiniFuture miniFut = miniFuture(res.miniId());

    if (miniFut != null)
        miniFut.onResult(res);
}
/** {@inheritDoc} */
@Override public boolean onDone(IgniteInternalTx t, Throwable err) {
    if (isDone())
        return false;
    if (err != null) {
        // Record the first error and propagate it to the key-lock future, if any.
        ERR_UPD.compareAndSet(this, null, err);
        if (keyLockFut != null)
            keyLockFut.onDone(err);
    }
    return onComplete();
}
/**
* Finds pending mini future by the given mini ID.
*
* @param miniId Mini ID to find.
* @return Mini future.
*/
@SuppressWarnings("ForLoopReplaceableByForEach")
private MiniFuture miniFuture(IgniteUuid miniId) {
// We iterate directly over the futs collection here to avoid copy.
synchronized (sync) {
// Avoid iterator creation.
for (int i = 0; i < futuresCount(); i++) {
IgniteInternalFuture<GridNearTxPrepareResponse> fut = future(i);
if (!isMini(fut))
continue;
MiniFuture mini = (MiniFuture)fut;
if (mini.futureId().equals(miniId)) {
if (!mini.isDone())
return mini;
else
return null;
}
}
}
return null;
}
/**
 * @param f Future.
 * @return {@code True} if {@code f} is exactly a mini-future (subclasses excluded).
 */
private boolean isMini(IgniteInternalFuture<?> f) {
    // Class identity comparison; Class.equals is identity-based, so == is equivalent.
    return MiniFuture.class == f.getClass();
}
/**
 * Completeness callback.
 *
 * @return {@code True} if future was finished by this call.
 */
private boolean onComplete() {
    Throwable err0 = err;
    // Move to PREPARED either on success or when a backup check is pending
    // (in the latter case the outcome is resolved by the backup check).
    if (err0 == null || tx.needCheckBackup())
        tx.state(PREPARED);
    if (super.onDone(tx, err0)) {
        if (err0 != null)
            tx.setRollbackOnly();
        // Don't forget to clean up.
        cctx.mvcc().removeMvccFuture(this);
        return true;
    }
    return false;
}
/**
 * Initializes future.
 *
 * @param remap Remap flag.
 * @param topLocked {@code True} if the topology is already locked by the caller.
 */
@Override protected void prepare0(boolean remap, boolean topLocked) {
    // On remap the tx must already be PREPARING; otherwise atomically move it to PREPARING.
    boolean txStateCheck = remap ? tx.state() == PREPARING : tx.state(PREPARING);
    if (!txStateCheck) {
        if (tx.setRollbackOnly()) {
            if (tx.timedOut())
                onError(null, new IgniteTxTimeoutCheckedException("Transaction timed out and " +
                    "was rolled back: " + this));
            else
                onError(null, new IgniteCheckedException("Invalid transaction state for prepare " +
                    "[state=" + tx.state() + ", tx=" + this + ']'));
        }
        else
            onError(null, new IgniteTxRollbackCheckedException("Invalid transaction state for " +
                "prepare [state=" + tx.state() + ", tx=" + this + ']'));
        return;
    }
    // Pin the tx topology version for the duration of mapping; restore the hint afterwards.
    boolean set = cctx.tm().setTxTopologyHint(tx.topologyVersionSnapshot());
    try {
        prepare(tx.readEntries(), tx.writeEntries(), remap, topLocked);
        markInitialized();
    }
    finally {
        if (set)
            cctx.tm().setTxTopologyHint(null);
    }
}
/**
 * Maps all read and write entries to node mappings and sends prepare requests.
 *
 * @param reads Read entries.
 * @param writes Write entries.
 * @param remap Remap flag.
 * @param topLocked Topology locked flag.
 */
@SuppressWarnings("unchecked")
private void prepare(
    Iterable<IgniteTxEntry> reads,
    Iterable<IgniteTxEntry> writes,
    boolean remap,
    boolean topLocked
) {
    AffinityTopologyVersion topVer = tx.topologyVersion();
    assert topVer.topologyVersion() > 0;
    txMapping = new GridDhtTxMapping();
    // Mapping key is (primary node, near flag): near and colocated entries for the
    // same node go into separate mappings.
    Map<IgniteBiTuple<ClusterNode, Boolean>, GridDistributedTxMapping> mappings = new HashMap<>();
    for (IgniteTxEntry write : writes)
        map(write, topVer, mappings, remap, topLocked);
    for (IgniteTxEntry read : reads)
        map(read, topVer, mappings, remap, topLocked);
    if (keyLockFut != null)
        keyLockFut.onAllKeysAdded();
    if (isDone()) {
        if (log.isDebugEnabled())
            log.debug("Abandoning (re)map because future is done: " + this);
        return;
    }
    tx.addEntryMapping(mappings.values());
    cctx.mvcc().recheckPendingLocks();
    tx.transactionNodes(txMapping.transactionNodes());
    checkOnePhase();
    // One mini-future per node mapping.
    for (GridDistributedTxMapping m : mappings.values()) {
        assert !m.empty();
        add(new MiniFuture(this, m));
    }
    Collection<IgniteInternalFuture<?>> futs = (Collection)futures();
    Iterator<IgniteInternalFuture<?>> it = futs.iterator();
    while (it.hasNext()) {
        IgniteInternalFuture<?> fut0 = it.next();
        if (skipFuture(remap, fut0))
            continue;
        MiniFuture fut = (MiniFuture)fut0;
        IgniteCheckedException err = prepare(fut);
        if (err != null) {
            // First failure aborts the rest: fail all remaining mini-futures and
            // drop their node mappings.
            while (it.hasNext()) {
                fut0 = it.next();
                if (skipFuture(remap, fut0))
                    continue;
                fut = (MiniFuture)fut0;
                tx.removeMapping(fut.mapping().node().id());
                fut.onResult(new IgniteCheckedException("Failed to prepare transaction.", err));
            }
            break;
        }
    }
    markInitialized();
}
/**
 * @param remap Remap flag.
 * @param fut Future.
 * @return {@code True} if the future should be skipped (not a mini-future, or a
 *      mini-future that already received a result during remap).
 */
private boolean skipFuture(boolean remap, IgniteInternalFuture<?> fut) {
    if (!isMini(fut))
        return true;

    return remap && ((MiniFuture)fut).rcvRes == 1;
}
/**
 * Sends (or locally executes) the prepare request for a single node mapping.
 *
 * @param fut Mini future.
 * @return Prepare error if any.
 */
@Nullable private IgniteCheckedException prepare(final MiniFuture fut) {
    GridDistributedTxMapping m = fut.mapping();
    final ClusterNode n = m.node();
    long timeout = tx.remainingTime();
    // -1 means the transaction has already timed out.
    if (timeout == -1) {
        IgniteCheckedException err = tx.timeoutException();
        fut.onResult(err);
        return err;
    }
    GridNearTxPrepareRequest req = new GridNearTxPrepareRequest(
        futId,
        tx.topologyVersion(),
        tx,
        timeout,
        m.reads(),
        m.writes(),
        m.near(),
        txMapping.transactionNodes(),
        m.last(),
        tx.onePhaseCommit(),
        tx.needReturnValue() && tx.implicit(),
        tx.implicitSingle(),
        m.explicitLock(),
        tx.subjectId(),
        tx.taskNameHash(),
        m.clientFirst(),
        tx.activeCachesDeploymentEnabled());
    // TRANSFORM entries get a null DHT version placeholder in the request.
    for (IgniteTxEntry txEntry : m.entries()) {
        if (txEntry.op() == TRANSFORM)
            req.addDhtVersion(txEntry.txKey(), null);
    }
    // Must lock near entries separately.
    if (m.near()) {
        try {
            tx.optimisticLockEntries(m.entries());
            tx.userPrepare();
        }
        catch (IgniteCheckedException e) {
            fut.onResult(e);
            return e;
        }
    }
    req.miniId(fut.futureId());
    // If this is the primary node for the keys.
    if (n.isLocal()) {
        // Local node: run prepare through the tx handler and complete the
        // mini-future from the returned future.
        IgniteInternalFuture<GridNearTxPrepareResponse> prepFut = cctx.tm().txHandler().prepareTx(n.id(), tx, req);
        prepFut.listen(new CI1<IgniteInternalFuture<GridNearTxPrepareResponse>>() {
            @Override public void apply(IgniteInternalFuture<GridNearTxPrepareResponse> prepFut) {
                try {
                    fut.onResult(prepFut.get());
                }
                catch (IgniteCheckedException e) {
                    fut.onResult(e);
                }
            }
        });
    }
    else {
        try {
            cctx.io().send(n, req, tx.ioPolicy());
        }
        catch (ClusterTopologyCheckedException e) {
            e.retryReadyFuture(cctx.nextAffinityReadyFuture(tx.topologyVersion()));
            fut.onNodeLeft(e);
            return e;
        }
        catch (IgniteCheckedException e) {
            fut.onResult(e);
            return e;
        }
    }
    return null;
}
/**
 * Maps a single transaction entry to its primary node, creating/updating the node mapping.
 *
 * @param entry Transaction entry.
 * @param topVer Topology version.
 * @param curMapping Current mapping.
 * @param remap Remap flag.
 * @param topLocked Topology locked flag.
 */
private void map(
    IgniteTxEntry entry,
    AffinityTopologyVersion topVer,
    Map<IgniteBiTuple<ClusterNode, Boolean>, GridDistributedTxMapping> curMapping,
    boolean remap,
    boolean topLocked
) {
    GridCacheContext cacheCtx = entry.context();
    List<ClusterNode> nodes = cacheCtx.isLocal() ?
        cacheCtx.affinity().nodes(entry.key(), topVer) :
        cacheCtx.topology().nodes(cacheCtx.affinity().partition(entry.key()), topVer);
    txMapping.addMapping(nodes);
    ClusterNode primary = F.first(nodes);
    assert primary != null;
    if (log.isDebugEnabled()) {
        log.debug("Mapped key to primary node [key=" + entry.key() +
            ", part=" + cacheCtx.affinity().partition(entry.key()) +
            ", primary=" + U.toShortString(primary) + ", topVer=" + topVer + ']');
    }
    // Serializable optimistic txs require protocol support on the primary node.
    if (primary.version().compareTo(SER_TX_SINCE) < 0) {
        onDone(new IgniteCheckedException("Optimistic serializable transactions can be used only with node " +
            "version starting from " + SER_TX_SINCE));
        return;
    }
    // Must re-initialize cached entry while holding topology lock.
    if (cacheCtx.isNear())
        entry.cached(cacheCtx.nearTx().entryExx(entry.key(), topVer));
    else if (!cacheCtx.isLocal())
        entry.cached(cacheCtx.colocated().entryExx(entry.key(), topVer, true));
    else
        entry.cached(cacheCtx.local().entryEx(entry.key(), topVer));
    // Near/local keys without an explicit lock are tracked by the key-lock future
    // (only on the initial map pass, not on remap).
    if (!remap && (cacheCtx.isNear() || cacheCtx.isLocal())) {
        if (entry.explicitVersion() == null) {
            if (keyLockFut == null) {
                keyLockFut = new KeyLockFuture();
                add(keyLockFut);
            }
            keyLockFut.addLockKey(entry.txKey());
        }
    }
    IgniteBiTuple<ClusterNode, Boolean> key = F.t(primary, cacheCtx.isNear());
    GridDistributedTxMapping cur = curMapping.get(key);
    if (cur == null) {
        cur = new GridDistributedTxMapping(primary);
        curMapping.put(key, cur);
        if (primary.isLocal()) {
            if (entry.context().isNear())
                tx.nearLocallyMapped(true);
            else if (entry.context().isColocated())
                tx.colocatedLocallyMapped(true);
        }
        // Initialize near flag right away.
        cur.near(cacheCtx.isNear());
        cur.clientFirst(!topLocked && cctx.kernalContext().clientNode());
        cur.last(true);
    }
    cur.add(entry);
    if (entry.explicitVersion() != null) {
        tx.markExplicit(primary.id());
        cur.markExplicitLock();
    }
    entry.nodeId(primary.id());
    if (cacheCtx.isNear()) {
        // Retry until the near entry is stable (it may be concurrently removed).
        while (true) {
            try {
                GridNearCacheEntry cached = (GridNearCacheEntry)entry.cached();
                cached.dhtNodeId(tx.xidVersion(), primary.id());
                break;
            }
            catch (GridCacheEntryRemovedException ignore) {
                entry.cached(cacheCtx.near().entryEx(entry.key(), topVer));
            }
        }
    }
}
/** {@inheritDoc} */
@Override public String toString() {
    // Render only mini-futures (selected by the predicate below).
    Collection<String> futs = F.viewReadOnly(futures(),
        new C1<IgniteInternalFuture<?>, String>() {
            @Override public String apply(IgniteInternalFuture<?> f) {
                return "[node=" + ((MiniFuture)f).node().id() +
                    ", loc=" + ((MiniFuture)f).node().isLocal() +
                    ", done=" + f.isDone() + "]";
            }
        },
        new P1<IgniteInternalFuture<?>>() {
            @Override public boolean apply(IgniteInternalFuture<?> f) {
                return isMini(f);
            }
        });
    return S.toString(GridNearOptimisticSerializableTxPrepareFuture.class, this,
        "innerFuts", futs,
        "keyLockFut", keyLockFut,
        "tx", tx,
        "super", super.toString());
}
/**
 * Client remap future: compound over mini-futures, reduced to a single
 * "remap needed" flag by {@link ClientRemapFutureReducer}.
 */
private static class ClientRemapFuture extends GridCompoundFuture<GridNearTxPrepareResponse, Boolean> {
    /** */
    private static final long serialVersionUID = 0L;

    /**
     * Constructor.
     */
    public ClientRemapFuture() {
        super(new ClientRemapFutureReducer());
    }
}
/**
 * Client remap future reducer: the reduced value is {@code true} only when every
 * collected response carried a non-null client remap version.
 */
private static class ClientRemapFutureReducer implements IgniteReducer<GridNearTxPrepareResponse, Boolean> {
    /** */
    private static final long serialVersionUID = 0L;

    /** Remap flag. */
    private boolean remap = true;

    /** {@inheritDoc} */
    @Override public boolean collect(@Nullable GridNearTxPrepareResponse res) {
        assert res != null;

        // Once a response without a remap version is seen, the flag stays false.
        remap = remap && res.clientRemapVersion() != null;

        return true;
    }

    /** {@inheritDoc} */
    @Override public Boolean reduce() {
        return remap;
    }
}
/**
 * Mini-future tracking the prepare result for a single node mapping.
 */
private static class MiniFuture extends GridFutureAdapter<GridNearTxPrepareResponse> {
    /** */
    private static final long serialVersionUID = 0L;

    /** Receive result flag updater. Shared constant, so declared {@code static final}. */
    private static final AtomicIntegerFieldUpdater<MiniFuture> RCV_RES_UPD =
        AtomicIntegerFieldUpdater.newUpdater(MiniFuture.class, "rcvRes");

    /** Future ID, echoed back in the prepare response to route it to this mini-future. */
    private final IgniteUuid futId = IgniteUuid.randomUuid();

    /** Parent future. */
    private final GridNearOptimisticSerializableTxPrepareFuture parent;

    /** Keys (node mapping this mini-future is responsible for); never reassigned. */
    @GridToStringInclude
    private final GridDistributedTxMapping m;

    /** Flag to signal some result being processed: transitions 0 -> 1 exactly once via CAS. */
    @SuppressWarnings("UnusedDeclaration")
    private volatile int rcvRes;

    /**
     * @param parent Parent future.
     * @param m Mapping.
     */
    MiniFuture(GridNearOptimisticSerializableTxPrepareFuture parent, GridDistributedTxMapping m) {
        this.parent = parent;
        this.m = m;
    }

    /**
     * @return Future ID.
     */
    IgniteUuid futureId() {
        return futId;
    }

    /**
     * @return Node ID.
     */
    public ClusterNode node() {
        return m.node();
    }

    /**
     * @return Keys.
     */
    public GridDistributedTxMapping mapping() {
        return m;
    }
/**
 * Fails this mini-future with the given error; only the first result (success or
 * failure) per mini-future is processed.
 *
 * @param e Error.
 */
void onResult(Throwable e) {
    if (RCV_RES_UPD.compareAndSet(this, 0, 1)) {
        // First result for this mini-future: record the error on the parent, then fail.
        parent.onError(m, e);
        if (log.isDebugEnabled())
            log.debug("Failed to get future result [fut=" + this + ", err=" + e + ']');
        // Fail.
        onDone(e);
    }
    else
        U.warn(log, "Received error after another result has been processed [fut=" +
            parent + ", mini=" + this + ']', e);
}
/**
 * Fails this mini-future because its mapped node left the grid.
 *
 * @param e Node failure.
 */
void onNodeLeft(ClusterTopologyCheckedException e) {
    if (isDone())
        return;
    if (RCV_RES_UPD.compareAndSet(this, 0, 1)) {
        if (log.isDebugEnabled())
            log.debug("Remote node left grid while sending or waiting for reply (will not retry): " + this);
        // Null mapping: the mapping should not be removed for a topology-change error.
        parent.onError(null, e);
        onDone(e);
    }
}
/**
 * Processes the prepare response for this mini-future: records errors, triggers a
 * client-side remap when the server reports a newer topology, or applies the
 * response to the parent future.
 *
 * @param res Result callback.
 */
@SuppressWarnings({"unchecked", "ThrowableResultOfMethodCallIgnored"})
void onResult(final GridNearTxPrepareResponse res) {
    if (isDone())
        return;
    // Only the first result per mini-future is processed.
    if (RCV_RES_UPD.compareAndSet(this, 0, 1)) {
        if (res.error() != null) {
            // Fail the whole compound future.
            parent.onError(m, res.error());
            onDone(res.error());
        }
        else {
            if (res.clientRemapVersion() != null) {
                // Server saw a newer topology: this client tx may need a remap.
                assert parent.cctx.kernalContext().clientNode();
                assert m.clientFirst();
                parent.tx.removeMapping(m.node().id());
                // Only the first mini-future to observe a remap version creates the
                // compound remap future; the others just complete.
                ClientRemapFuture remapFut0 = null;
                synchronized (parent) {
                    if (parent.remapFut == null) {
                        parent.remapFut = new ClientRemapFuture();
                        remapFut0 = parent.remapFut;
                    }
                }
                if (remapFut0 != null) {
                    Collection<IgniteInternalFuture<?>> futs = (Collection)parent.futures();
                    for (IgniteInternalFuture<?> fut : futs) {
                        if (parent.isMini(fut) && fut != this)
                            remapFut0.add((MiniFuture)fut);
                    }
                    remapFut0.markInitialized();
                    // NOTE(review): the listener parameter below shadows the outer
                    // 'remapFut0' local; inside the listener the name refers to the
                    // completed compound future.
                    remapFut0.listen(new CI1<IgniteInternalFuture<Boolean>>() {
                        @Override public void apply(IgniteInternalFuture<Boolean> remapFut0) {
                            try {
                                IgniteInternalFuture<?> affFut =
                                    parent.cctx.exchange().affinityReadyFuture(res.clientRemapVersion());
                                if (affFut == null)
                                    affFut = new GridFinishedFuture<Object>();
                                // Remap only when every response carried a remap version.
                                if (parent.remapFut.get()) {
                                    if (log.isDebugEnabled()) {
                                        log.debug("Will remap client tx [" +
                                            "fut=" + parent +
                                            ", topVer=" + res.topologyVersion() + ']');
                                    }
                                    synchronized (parent) {
                                        assert remapFut0 == parent.remapFut;
                                        parent.remapFut = null;
                                    }
                                    // Wait for the new affinity before remapping.
                                    affFut.listen(new CI1<IgniteInternalFuture<?>>() {
                                        @Override public void apply(IgniteInternalFuture<?> affFut) {
                                            try {
                                                affFut.get();
                                                remap(res);
                                            }
                                            catch (IgniteCheckedException e) {
                                                ERR_UPD.compareAndSet(parent, null, e);
                                                onDone(e);
                                            }
                                        }
                                    });
                                }
                                else {
                                    // Mixed responses: topology changed mid-prepare, fail with retry info.
                                    ClusterTopologyCheckedException err0 = new ClusterTopologyCheckedException(
                                        "Cluster topology changed while client transaction is preparing.");
                                    err0.retryReadyFuture(affFut);
                                    ERR_UPD.compareAndSet(parent, null, err0);
                                    onDone(err0);
                                }
                            }
                            catch (IgniteCheckedException e) {
                                if (log.isDebugEnabled()) {
                                    log.debug("Prepare failed, will not remap tx: " +
                                        parent);
                                }
                                ERR_UPD.compareAndSet(parent, null, e);
                                onDone(e);
                            }
                        }
                    });
                }
                else
                    onDone(res);
            }
            else {
                parent.onPrepareResponse(m, res);
                // Finish this mini future (need result only on client node).
                onDone(parent.cctx.kernalContext().clientNode() ? res : null);
            }
        }
    }
}
/**
 * Re-runs prepare on the new topology and completes this mini-future afterwards.
 *
 * @param res Response.
 */
private void remap(final GridNearTxPrepareResponse res) {
    parent.prepareOnTopology(true, new Runnable() {
        @Override
        public void run() {
            onDone(res);
        }
    });
}

/** {@inheritDoc} */
@Override public String toString() {
    return S.toString(MiniFuture.class, this, "done", isDone(), "cancelled", isCancelled(), "err", error());
}
}
}
| |
/*************************************************************************
*
* ADOBE CONFIDENTIAL
* __________________
*
* Copyright 2002 - 2007 Adobe Systems Incorporated
* All Rights Reserved.
*
* NOTICE: All information contained herein is, and remains
* the property of Adobe Systems Incorporated and its suppliers,
* if any. The intellectual and technical concepts contained
* herein are proprietary to Adobe Systems Incorporated
* and its suppliers and may be covered by U.S. and Foreign Patents,
* patents in process, and are protected by trade secret or copyright law.
* Dissemination of this information or reproduction of this material
* is strictly forbidden unless prior written permission is obtained
* from Adobe Systems Incorporated.
**************************************************************************/
package flex.management;
import java.util.Date;
import flex.messaging.FlexComponent;
import flex.messaging.config.ConfigMap;
import flex.messaging.config.ConfigurationException;
import flex.messaging.log.Log;
/**
* An abstract base class that implements the <code>Manageable</code> and <code>FlexComponent</code> interfaces.
* This is an excellent starting point for a server component that may be instantiated, initialized, started and
* stopped, as well as exposing an optional management interface via a peer MBean.
* <p>Support for changing component properties while the component is
* started should be determined on a per-property basis, and the started property is volatile to ensure consistent
* reads of the start state of the component across threads. This class performs no synchronization and is not safe for modification by multiple concurrent threads
* in the absence of external synchronization.
* </p>
*/
public abstract class ManageableComponent implements Manageable, FlexComponent
{
//--------------------------------------------------------------------------
//
// Protected Static Constants
//
//--------------------------------------------------------------------------

/**
 * Error code for attempting to change a property after starting.
 * Passed to <code>ConfigurationException.setMessage(...)</code>.
 */
protected static final int PROPERTY_CHANGE_AFTER_STARTUP = 11115;

/**
 * Error code to alert the user that a required component property is null.
 * Passed to <code>ConfigurationException.setMessage(...)</code>.
 */
protected static final int NULL_COMPONENT_PROPERTY = 11116;

//--------------------------------------------------------------------------
//
// Constructor
//
//--------------------------------------------------------------------------

/**
 * Constructs a <code>ManageableComponent</code> instance, specifying
 * whether to enable management.
 * Enabling management will trigger the creation of a peer MBean that exposes the
 * management interface for this component.
 *
 * @param enableManagement <code>true</code> to enable management, <code>false</code> to disable
 * management.
 */
public ManageableComponent(boolean enableManagement)
{
    setManaged(enableManagement);
}
//--------------------------------------------------------------------------
//
// Public Properties
//
//--------------------------------------------------------------------------

//----------------------------------
// control
//----------------------------------

/**
 * The peer MBean of the <code>ManageableComponent</code> that exposes a management interface.
 */
protected BaseControl control;

/**
 * @see Manageable#getControl()
 */
public BaseControl getControl()
{
    return control;
}

/**
 * @see Manageable#setControl(BaseControl)
 */
public void setControl(BaseControl control)
{
    this.control = control;
}

//----------------------------------
// id
//----------------------------------

/**
 * The internal id value of the <code>ManageableComponent</code>.
 */
protected String id;

/**
 * Returns the id of the <code>ManageableComponent</code>.
 *
 * @return The id of the <code>ManageableComponent</code>.
 */
public String getId()
{
    return id;
}

/**
 * Sets the id of the <code>ManageableComponent</code>. The id cannot be
 * null and it cannot be changed after startup.
 *
 * @param id The id of the <code>ManageableComponent</code>.
 */
public void setId(String id)
{
    // Both guards throw a ConfigurationException, so the assignment below is
    // only reached for a valid, pre-startup id.
    if (isStarted())
    {
        blockAssignmentWhileStarted("id");
    }
    if (id == null)
    {
        // Id of a component cannot be null.
        blockNullAssignment("id");
    }
    this.id = id;
}
//----------------------------------
// managed
//----------------------------------

/**
 * The internal managed flag of the <code>ManageableComponent</code>;
 * volatile for consistent reads across threads.
 */
protected volatile boolean managed;

/**
 * @see Manageable#isManaged()
 */
public boolean isManaged()
{
    return managed;
}

/**
 * Enables or disables management for the component. Management cannot be
 * changed once the component is started and management cannot be
 * <code>true</code> if the parent of the component is not managed.
 *
 * @param enableManagement <code>true</code> to enable management, <code>false</code> to disable management.
 */
public void setManaged(boolean enableManagement)
{
    // NOTE(review): the guard only blocks reassignment when a peer MBean already
    // exists (control != null), which is narrower than the Javadoc's "cannot be
    // changed once started" — confirm this is intentional.
    if (isStarted() && control != null)
    {
        blockAssignmentWhileStarted("managed");
    }
    // A child cannot be managed while its parent is unmanaged; warn and keep the
    // current value.
    if (enableManagement && parent != null && !parent.isManaged())
    {
        if (Log.isWarn())
        {
            Log.getLogger(getLogCategory()).warn("Component: '" + id + "' cannot be managed" +
                    " since its parent is unmanaged.");
        }
        return;
    }
    managed = enableManagement;
}
//----------------------------------
// parent
//----------------------------------

/**
 * The internal reference to the parent component (if any) of the <code>ManageableComponent</code>.
 */
protected Manageable parent;

/**
 * Returns the parent of the component.
 *
 * @return The parent of the component.
 */
public Manageable getParent()
{
    return parent;
}

/**
 * Sets the parent of the component. The parent cannot be changed
 * after component startup and it cannot be null.
 *
 * @param parent The parent of the component.
 */
public void setParent(Manageable parent)
{
    if (isStarted())
    {
        blockAssignmentWhileStarted("parent");
    }
    // blockNullAssignment throws, so the parent dereference below is safe.
    if (parent == null)
    {
        // Parent of a component cannot be null.
        blockNullAssignment("parent");
    }
    // Adopting an unmanaged parent forces this component to be unmanaged too.
    if (!parent.isManaged() && isManaged())
    {
        if (Log.isWarn())
        {
            Log.getLogger(getLogCategory()).warn("Component: '" + id + "' cannot be managed" +
                    " since its parent is unmanaged.");
        }
        setManaged(false);
    }
    this.parent = parent;
}
//----------------------------------
// started
//----------------------------------

/**
 * The internal started flag of the <code>ManageableComponent</code>;
 * volatile for consistent reads of the start state across threads.
 */
protected volatile boolean started;

/**
 * Returns if the component is started or not.
 *
 * @return <code>true</code> if the component is started.
 */
public boolean isStarted()
{
    return started;
}
/**
 * Sets if the component is started.
 * Records the start timestamp on the peer MBean when the component transitions
 * into the started state.
 *
 * @param started <code>true</code> if the component is started.
 */
protected void setStarted(boolean started)
{
    // No-op when the state does not actually change.
    if (this.started == started)
        return;

    this.started = started;

    // Stamp the start time only on a transition into the started state.
    if (started && control != null)
        control.setStartTimestamp(new Date());
}
//----------------------------------
// valid
//----------------------------------

/**
 * The internal valid flag of the <code>ManageableComponent</code>.
 */
protected boolean valid;

/**
 * Returns if the component is valid.
 *
 * @return <code>true</code> if the component is valid.
 */
public boolean isValid()
{
    return valid;
}

/**
 * Sets if the component is valid.
 *
 * @param valid <code>true</code> if the component is valid.
 */
protected void setValid(boolean valid)
{
    this.valid = valid;
}

//----------------------------------
// logCategory
//----------------------------------

/**
 * Returns the log category of the component. Subclasses must provide an
 * implementation that returns their desired log category.
 *
 * @return The log category of the component.
 */
protected abstract String getLogCategory();
//--------------------------------------------------------------------------
//
// Public Methods
//
//--------------------------------------------------------------------------

/**
 * Invoked to initialize the <code>ManageableComponent</code>.
 * This base implementation calls <code>setId()</code> passing the provided
 * id and ignores the properties map argument.
 * Subclasses should call <code>super.initialize()</code>.
 *
 * @param id Id of the <code>ManageableComponent</code>.
 * @param properties Properties for the <code>ManageableComponent</code>.
 */
public void initialize(String id, ConfigMap properties)
{
    setId(id);
}

/**
 * Validates and starts the component.
 *
 * Subclasses should call <code>super.start()</code>.
 */
public void start()
{
    // validate() throws if the component is missing an id or parent.
    validate();
    setStarted(true);
}

/**
 * Invalidates and stops the component.
 *
 * Subclasses should call <code>super.stop()</code>.
 */
public void stop()
{
    invalidate();
    setStarted(false);
}
//--------------------------------------------------------------------------
//
// Protected Methods
//
//--------------------------------------------------------------------------
/**
 * Convenience method that generates and throws a <code>ConfigurationException</code>
 * for an attempt to set the specified property while the component is started.
 *
 * @param propertyName The name of the property being incorrectly assigned; included in the Exception message.
 */
protected void blockAssignmentWhileStarted(String propertyName)
{
    ConfigurationException exception = new ConfigurationException();
    exception.setMessage(PROPERTY_CHANGE_AFTER_STARTUP, new Object[] {propertyName});

    throw exception;
}
/**
 * Convenience method that generates and throws a <code>ConfigurationException</code>
 * for an attempt to assign a null value to a property that requires non-null values.
 *
 * @param propertyName The name of the property being incorrectly assigned.
 */
protected void blockNullAssignment(String propertyName)
{
    ConfigurationException exception = new ConfigurationException();
    exception.setMessage(NULL_COMPONENT_PROPERTY, new Object[] {propertyName});

    throw exception;
}
/**
 * Invoked from within the <code>stop()</code> method to invalidate the component as part of shutdown.
 * This base implementation sets the valid property to false.
 * Subclasses should call <code>super.invalidate()</code>.
 */
protected void invalidate()
{
    setValid(false);
}
/**
 * Hook method invoked from within the <code>start()</code> method to validate that the component is in a
 * startable state.
 * This base implementation validates the component by ensuring it has an id and a parent and then sets
 * the valid property to true.
 * If the component is not in a valid, startable state an Exception is thrown.
 * Subclasses should call <code>super.validate()</code>.
 */
protected void validate()
{
    // A component requires an id before it may start; this throws when missing.
    if (getId() == null)
        blockNullAssignment("id");

    // ...and it also requires a parent.
    if (getParent() == null)
        blockNullAssignment("parent");

    setValid(true);
}
}
| |
package org.openestate.io.idealista.json;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import javax.validation.Valid;
import javax.validation.constraints.Pattern;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
/**
 * Contact object holding a contact name, e-mail address and primary/secondary
 * phone numbers.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonPropertyOrder({
"contactName",
"contactEmail",
"contactPrimaryPhonePrefix",
"contactPrimaryPhoneNumber",
"contactSecondaryPhonePrefix",
"contactSecondaryPhoneNumber"
})
public class Contact implements Serializable
{
/** Contact name (at most 60 characters, per the pattern). */
@JsonProperty("contactName")
@Pattern(regexp = "^.{0,60}$")
private String name;

/** Contact e-mail address. */
@JsonProperty("contactEmail")
@Pattern(regexp = "^(([a-zA-Z0-9-_\\.])+)@((?:[a-zA-Z0-9-_]+\\.)+)([a-zA-Z]{2,5})$")
private String email;

/** Dialing prefix of the primary phone number (1-3 digits, first digit non-zero). */
@JsonProperty("contactPrimaryPhonePrefix")
@Pattern(regexp = "^[1-9][0-9]{0,2}$")
private String primaryPhonePrefix;

/** Primary phone number (5-12 digits). */
@JsonProperty("contactPrimaryPhoneNumber")
@Pattern(regexp = "^[0-9]{5,12}$")
private String primaryPhoneNumber;

/** Dialing prefix of the secondary phone number (1-3 digits, first digit non-zero). */
@JsonProperty("contactSecondaryPhonePrefix")
@Pattern(regexp = "^[1-9][0-9]{0,2}$")
private String secondaryPhonePrefix;

/** Secondary phone number (5-12 digits). */
@JsonProperty("contactSecondaryPhoneNumber")
@Pattern(regexp = "^[0-9]{5,12}$")
private String secondaryPhoneNumber;

/** Catch-all for JSON properties not mapped to a dedicated field. */
@JsonIgnore
@Valid
private Map<String, Object> additionalProperties = new HashMap<String, Object>();

private final static long serialVersionUID = 3433591450038418896L;
/** @return the contact name, or {@code null} if unset. */
@JsonProperty("contactName")
public String getName() {
    return name;
}

/** @param name the contact name. */
@JsonProperty("contactName")
public void setName(String name) {
    this.name = name;
}

/** Fluent variant of {@code setName}; returns {@code this} for chaining. */
public Contact withName(String name) {
    this.name = name;
    return this;
}

/** @return the contact e-mail address, or {@code null} if unset. */
@JsonProperty("contactEmail")
public String getEmail() {
    return email;
}

/** @param email the contact e-mail address. */
@JsonProperty("contactEmail")
public void setEmail(String email) {
    this.email = email;
}

/** Fluent variant of {@code setEmail}; returns {@code this} for chaining. */
public Contact withEmail(String email) {
    this.email = email;
    return this;
}

/** @return the primary phone dialing prefix, or {@code null} if unset. */
@JsonProperty("contactPrimaryPhonePrefix")
public String getPrimaryPhonePrefix() {
    return primaryPhonePrefix;
}

/** @param primaryPhonePrefix the primary phone dialing prefix. */
@JsonProperty("contactPrimaryPhonePrefix")
public void setPrimaryPhonePrefix(String primaryPhonePrefix) {
    this.primaryPhonePrefix = primaryPhonePrefix;
}

/** Fluent variant of {@code setPrimaryPhonePrefix}; returns {@code this} for chaining. */
public Contact withPrimaryPhonePrefix(String primaryPhonePrefix) {
    this.primaryPhonePrefix = primaryPhonePrefix;
    return this;
}

/** @return the primary phone number, or {@code null} if unset. */
@JsonProperty("contactPrimaryPhoneNumber")
public String getPrimaryPhoneNumber() {
    return primaryPhoneNumber;
}

/** @param primaryPhoneNumber the primary phone number. */
@JsonProperty("contactPrimaryPhoneNumber")
public void setPrimaryPhoneNumber(String primaryPhoneNumber) {
    this.primaryPhoneNumber = primaryPhoneNumber;
}

/** Fluent variant of {@code setPrimaryPhoneNumber}; returns {@code this} for chaining. */
public Contact withPrimaryPhoneNumber(String primaryPhoneNumber) {
    this.primaryPhoneNumber = primaryPhoneNumber;
    return this;
}

/** @return the secondary phone dialing prefix, or {@code null} if unset. */
@JsonProperty("contactSecondaryPhonePrefix")
public String getSecondaryPhonePrefix() {
    return secondaryPhonePrefix;
}

/** @param secondaryPhonePrefix the secondary phone dialing prefix. */
@JsonProperty("contactSecondaryPhonePrefix")
public void setSecondaryPhonePrefix(String secondaryPhonePrefix) {
    this.secondaryPhonePrefix = secondaryPhonePrefix;
}

/** Fluent variant of {@code setSecondaryPhonePrefix}; returns {@code this} for chaining. */
public Contact withSecondaryPhonePrefix(String secondaryPhonePrefix) {
    this.secondaryPhonePrefix = secondaryPhonePrefix;
    return this;
}

/** @return the secondary phone number, or {@code null} if unset. */
@JsonProperty("contactSecondaryPhoneNumber")
public String getSecondaryPhoneNumber() {
    return secondaryPhoneNumber;
}

/** @param secondaryPhoneNumber the secondary phone number. */
@JsonProperty("contactSecondaryPhoneNumber")
public void setSecondaryPhoneNumber(String secondaryPhoneNumber) {
    this.secondaryPhoneNumber = secondaryPhoneNumber;
}

/** Fluent variant of {@code setSecondaryPhoneNumber}; returns {@code this} for chaining. */
public Contact withSecondaryPhoneNumber(String secondaryPhoneNumber) {
    this.secondaryPhoneNumber = secondaryPhoneNumber;
    return this;
}

/** @return the map of unmapped JSON properties (serialized back via {@code @JsonAnyGetter}). */
@JsonAnyGetter
public Map<String, Object> getAdditionalProperties() {
    return this.additionalProperties;
}

/** Stores an unmapped JSON property (invoked by Jackson via {@code @JsonAnySetter}). */
@JsonAnySetter
public void setAdditionalProperty(String name, Object value) {
    this.additionalProperties.put(name, value);
}

/** Fluent variant of {@code setAdditionalProperty}; returns {@code this} for chaining. */
public Contact withAdditionalProperty(String name, Object value) {
    this.additionalProperties.put(name, value);
    return this;
}
/**
 * Renders this contact as {@code ClassName@hash[field=value,...]}; null fields
 * are rendered as {@code <null>}.
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(Contact.class.getName()).append('@').append(Integer.toHexString(System.identityHashCode(this))).append('[');

    // Field order matches the JSON property order of this bean.
    Object[][] entries = new Object[][] {
        {"name", this.name},
        {"email", this.email},
        {"primaryPhonePrefix", this.primaryPhonePrefix},
        {"primaryPhoneNumber", this.primaryPhoneNumber},
        {"secondaryPhonePrefix", this.secondaryPhonePrefix},
        {"secondaryPhoneNumber", this.secondaryPhoneNumber},
        {"additionalProperties", this.additionalProperties}
    };

    for (Object[] entry : entries) {
        sb.append(entry[0]).append('=').append((entry[1] == null) ? "<null>" : entry[1]).append(',');
    }

    // Replace the trailing comma with the closing bracket.
    if (sb.charAt(sb.length() - 1) == ',') {
        sb.setCharAt(sb.length() - 1, ']');
    } else {
        sb.append(']');
    }
    return sb.toString();
}
/**
 * Hash over all data fields, consistent with {@link #equals(Object)}.
 * {@code Objects.hash} applies the identical 31-multiplier chain (seed 1,
 * null contributes 0) over the same field order as the hand-rolled version,
 * so the produced values are unchanged.
 */
@Override
public int hashCode() {
    return java.util.Objects.hash(
        this.primaryPhonePrefix,
        this.name,
        this.secondaryPhonePrefix,
        this.primaryPhoneNumber,
        this.secondaryPhoneNumber,
        this.additionalProperties,
        this.email);
}
/**
 * Field-by-field equality; two contacts are equal when every declared field
 * and the additional-properties map are null-safely equal.
 */
@Override
public boolean equals(Object other) {
    if (other == this) {
        return true;
    }
    if (!(other instanceof Contact)) {
        return false;
    }
    Contact that = (Contact) other;
    // Objects.equals(a, b) == (a == b) || (a != null && a.equals(b)) — same
    // null-safe comparison the generated code spelled out inline.
    return java.util.Objects.equals(this.primaryPhonePrefix, that.primaryPhonePrefix)
        && java.util.Objects.equals(this.name, that.name)
        && java.util.Objects.equals(this.secondaryPhonePrefix, that.secondaryPhonePrefix)
        && java.util.Objects.equals(this.primaryPhoneNumber, that.primaryPhoneNumber)
        && java.util.Objects.equals(this.secondaryPhoneNumber, that.secondaryPhoneNumber)
        && java.util.Objects.equals(this.additionalProperties, that.additionalProperties)
        && java.util.Objects.equals(this.email, that.email);
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.move.moveClassesOrPackages;
import com.intellij.ide.util.EditorHelper;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.PackageScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.MethodSignatureUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.refactoring.BaseRefactoringProcessor;
import com.intellij.refactoring.MoveDestination;
import com.intellij.refactoring.PackageWrapper;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.listeners.RefactoringElementListener;
import com.intellij.refactoring.listeners.RefactoringEventData;
import com.intellij.refactoring.move.MoveCallback;
import com.intellij.refactoring.move.MoveClassesOrPackagesCallback;
import com.intellij.refactoring.move.MoveMultipleElementsViewDescriptor;
import com.intellij.refactoring.rename.RenameUtil;
import com.intellij.refactoring.util.*;
import com.intellij.refactoring.util.classRefs.ClassInstanceScanner;
import com.intellij.refactoring.util.classRefs.ClassReferenceScanner;
import com.intellij.usageView.UsageInfo;
import com.intellij.usageView.UsageViewDescriptor;
import com.intellij.usageView.UsageViewUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.VisibilityUtil;
import java.util.HashMap;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
* @author Jeka,dsl
*/
/**
 * Refactoring processor that moves classes and/or packages into a target package,
 * retargeting code references (optionally also occurrences in comments and
 * non-java files) and collecting visibility/module conflicts up front so the
 * user can review them before anything changes.
 *
 * @author Jeka,dsl
 */
public class MoveClassesOrPackagesProcessor extends BaseRefactoringProcessor {
  private static final Logger LOG = Logger.getInstance(
    "#com.intellij.refactoring.move.moveClassesOrPackages.MoveClassesOrPackagesProcessor");

  private final PsiElement[] myElementsToMove;
  private boolean mySearchInComments;
  private boolean mySearchInNonJavaFiles;
  private final PackageWrapper myTargetPackage;
  private final MoveCallback myMoveCallback;
  protected @NotNull final MoveDestination myMoveDestination;
  protected NonCodeUsageInfo[] myNonCodeUsages;
  private boolean myOpenInEditor;
  // Populated in findUsages(), consumed in preprocessUsages().
  private MultiMap<PsiElement, String> myConflicts;

  public MoveClassesOrPackagesProcessor(Project project,
                                        PsiElement[] elements,
                                        @NotNull final MoveDestination moveDestination,
                                        boolean searchInComments,
                                        boolean searchInNonJavaFiles,
                                        MoveCallback moveCallback) {
    super(project);
    // Selecting a whole file means moving each of its top-level classes.
    final Set<PsiElement> toMove = new LinkedHashSet<>();
    for (PsiElement element : elements) {
      PsiUtilCore.ensureValid(element);
      if (element instanceof PsiClassOwner) {
        for (PsiClass aClass : ((PsiClassOwner)element).getClasses()) {
          PsiUtilCore.ensureValid(aClass);
          toMove.add(aClass);
        }
      } else {
        toMove.add(element);
      }
    }
    myElementsToMove = PsiUtilCore.toPsiElementArray(toMove);
    // Put the class whose name matches its file name first, so the file is
    // moved together with that class and the siblings follow it.
    Arrays.sort(myElementsToMove, (o1, o2) -> {
      if (o1 instanceof PsiClass && o2 instanceof PsiClass) {
        final PsiFile containingFile = o1.getContainingFile();
        if (Comparing.equal(containingFile, o2.getContainingFile())) {
          final VirtualFile virtualFile = containingFile.getVirtualFile();
          if (virtualFile != null) {
            final String fileName = virtualFile.getNameWithoutExtension();
            if (Comparing.strEqual(fileName, ((PsiClass)o1).getName())) return -1;
            if (Comparing.strEqual(fileName, ((PsiClass)o2).getName())) return 1;
          }
        }
      }
      return 0;
    });
    myMoveDestination = moveDestination;
    myTargetPackage = myMoveDestination.getTargetPackage();
    mySearchInComments = searchInComments;
    mySearchInNonJavaFiles = searchInNonJavaFiles;
    myMoveCallback = moveCallback;
  }

  @Override
  @NotNull
  protected UsageViewDescriptor createUsageViewDescriptor(@NotNull UsageInfo[] usages) {
    PsiElement[] elements = new PsiElement[myElementsToMove.length];
    System.arraycopy(myElementsToMove, 0, elements, 0, myElementsToMove.length);
    return new MoveMultipleElementsViewDescriptor(elements, MoveClassesOrPackagesUtil.getPackageName(myTargetPackage));
  }

  /** Shows an error dialog and returns {@code false} if the target package name is not a valid qualified name. */
  public boolean verifyValidPackageName() {
    String qName = myTargetPackage.getQualifiedName();
    if (!StringUtil.isEmpty(qName)) {
      PsiNameHelper helper = PsiNameHelper.getInstance(myProject);
      if (!helper.isQualifiedName(qName)) {
        Messages.showMessageDialog(myProject, RefactoringBundle.message("invalid.target.package.name.specified"), "Invalid Package Name",
                                   Messages.getErrorIcon());
        return false;
      }
    }
    return true;
  }

  private boolean hasClasses() {
    for (PsiElement element : getElements()) {
      if (element instanceof PsiClass) return true;
    }
    return false;
  }

  public boolean isSearchInComments() {
    return mySearchInComments;
  }

  public boolean isSearchInNonJavaFiles() {
    return mySearchInNonJavaFiles;
  }

  public void setSearchInComments(boolean searchInComments) {
    mySearchInComments = searchInComments;
  }

  public void setSearchInNonJavaFiles(boolean searchInNonJavaFiles) {
    mySearchInNonJavaFiles = searchInNonJavaFiles;
  }

  @Override
  @NotNull
  protected UsageInfo[] findUsages() {
    final List<UsageInfo> allUsages = new ArrayList<>();
    final List<UsageInfo> usagesToSkip = new ArrayList<>();
    myConflicts = new MultiMap<>();
    for (PsiElement element : myElementsToMove) {
      String newName = getNewQName(element);
      if (newName == null) continue;
      final UsageInfo[] usages = MoveClassesOrPackagesUtil.findUsages(element, mySearchInComments,
                                                                      mySearchInNonJavaFiles, newName);
      final ArrayList<UsageInfo> infos = new ArrayList<>(Arrays.asList(usages));
      allUsages.addAll(infos);
      // Moving into the same package: usages need no retargeting, but are still
      // used for conflict analysis below.
      if (Comparing.strEqual(newName, getOldQName(element))) {
        usagesToSkip.addAll(infos);
      }
      if (element instanceof PsiPackage) {
        for (PsiDirectory directory : ((PsiPackage)element).getDirectories()) {
          final UsageInfo[] dirUsages = MoveClassesOrPackagesUtil.findUsages(directory, mySearchInComments,
                                                                             mySearchInNonJavaFiles, newName);
          allUsages.addAll(new ArrayList<>(Arrays.asList(dirUsages)));
        }
      }
    }
    myMoveDestination.analyzeModuleConflicts(Arrays.asList(myElementsToMove), myConflicts,
                                             allUsages.toArray(UsageInfo.EMPTY_ARRAY));
    final UsageInfo[] usageInfos = allUsages.toArray(UsageInfo.EMPTY_ARRAY);
    detectPackageLocalsMoved(usageInfos, myConflicts);
    detectPackageLocalsUsed(myConflicts, myElementsToMove, myTargetPackage);
    allUsages.removeAll(usagesToSkip);
    return UsageViewUtil.removeDuplicatedUsages(allUsages.toArray(UsageInfo.EMPTY_ARRAY));
  }

  public List<PsiElement> getElements() {
    return Collections.unmodifiableList(Arrays.asList(myElementsToMove));
  }

  public PackageWrapper getTargetPackage() {
    return myMoveDestination.getTargetPackage();
  }

  public void setOpenInEditor(boolean openInEditor) {
    myOpenInEditor = openInEditor;
  }

  @Nullable
  @Override
  protected String getRefactoringId() {
    return "refactoring.move";
  }

  @Nullable
  @Override
  protected RefactoringEventData getBeforeData() {
    RefactoringEventData data = new RefactoringEventData();
    data.addElements(myElementsToMove);
    return data;
  }

  @Nullable
  @Override
  protected RefactoringEventData getAfterData(@NotNull UsageInfo[] usages) {
    RefactoringEventData data = new RefactoringEventData();
    data.addElements(myTargetPackage.getDirectories());
    data.addElement(JavaPsiFacade.getInstance(myProject).findPackage(myTargetPackage.getQualifiedName()));
    return data;
  }

  @Override
  protected boolean preprocessUsages(@NotNull Ref<UsageInfo[]> refUsages) {
    final UsageInfo[] usages = refUsages.get();
    return showConflicts(myConflicts, usages);
  }

  /** Whether {@code place} lies inside one of the classes being moved (such usages cause no conflict). */
  private boolean isInsideMoved(PsiElement place) {
    for (PsiElement element : myElementsToMove) {
      if (element instanceof PsiClass) {
        if (PsiTreeUtil.isAncestor(element, place, false)) return true;
      }
    }
    return false;
  }

  /** Reports package-local declarations referenced BY the moved elements that will become inaccessible. */
  static void detectPackageLocalsUsed(final MultiMap<PsiElement, String> conflicts,
                                      PsiElement[] elementsToMove,
                                      PackageWrapper targetPackage) {
    PackageLocalsUsageCollector visitor = new PackageLocalsUsageCollector(elementsToMove, targetPackage, conflicts);
    for (PsiElement element : elementsToMove) {
      if (element.getContainingFile() != null) {
        element.accept(visitor);
      }
    }
  }

  /** Reports usages OF the moved classes/members that will lose access once the classes change package. */
  private void detectPackageLocalsMoved(final UsageInfo[] usages, final MultiMap<PsiElement, String> conflicts) {
    final HashSet<PsiClass> movedClasses = new HashSet<>();
    final HashMap<PsiClass, HashSet<PsiElement>> reportedClassToContainers = new HashMap<>();
    final PackageWrapper aPackage = myTargetPackage;
    for (UsageInfo usage : usages) {
      PsiElement element = usage.getElement();
      if (element == null) continue;
      if (usage instanceof MoveRenameUsageInfo && !(usage instanceof NonCodeUsageInfo) &&
          ((MoveRenameUsageInfo)usage).getReferencedElement() instanceof PsiClass) {
        // The instanceof check above guarantees aClass is non-null.
        PsiClass aClass = (PsiClass)((MoveRenameUsageInfo)usage).getReferencedElement();
        movedClasses.add(aClass);
        if (aClass.hasModifierProperty(PsiModifier.PACKAGE_LOCAL)) {
          // Broken imports are reported elsewhere; skip them here.
          if (PsiTreeUtil.getParentOfType(element, PsiImportStatement.class) != null) continue;
          PsiElement container = ConflictsUtil.getContainer(element);
          // Report each (class, container) pair at most once.
          HashSet<PsiElement> reported = reportedClassToContainers.computeIfAbsent(aClass, k -> new HashSet<>());
          if (reported.add(container)) {
            PsiFile containingFile = element.getContainingFile();
            if (containingFile != null && !isInsideMoved(element)) {
              PsiDirectory directory = containingFile.getContainingDirectory();
              if (directory != null) {
                PsiPackage usagePackage = JavaDirectoryService.getInstance().getPackage(directory);
                if (aPackage != null && usagePackage != null && !aPackage.equalToPackage(usagePackage)) {
                  final String message = RefactoringBundle.message("a.package.local.class.0.will.no.longer.be.accessible.from.1",
                                                                   CommonRefactoringUtil.htmlEmphasize(aClass.getName()),
                                                                   RefactoringUIUtil.getDescription(
                                                                     container, true));
                  conflicts.putValue(aClass, message);
                }
              }
            }
          }
        }
      }
    }
    final MyClassInstanceReferenceVisitor instanceReferenceVisitor = new MyClassInstanceReferenceVisitor(conflicts);
    for (final PsiClass aClass : movedClasses) {
      String visibility = VisibilityUtil.getVisibilityModifier(aClass.getModifierList());
      if (PsiModifier.PACKAGE_LOCAL.equals(visibility)) {
        findInstancesOfPackageLocal(aClass, usages, instanceReferenceVisitor);
      }
      else {
        // Public classes may still expose package-local/protected members.
        findPublicClassConflicts(aClass, instanceReferenceVisitor);
      }
    }
  }

  /**
   * Identity wrapper for class members: methods compare by full signature,
   * everything else by name.
   */
  static class ClassMemberWrapper {
    final PsiNamedElement myElement;
    final PsiModifierListOwner myMember;

    public ClassMemberWrapper(PsiNamedElement element) {
      myElement = element;
      myMember = (PsiModifierListOwner) element;
    }

    PsiModifierListOwner getMember() {
      return myMember;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (!(o instanceof ClassMemberWrapper)) return false;
      ClassMemberWrapper wrapper = (ClassMemberWrapper)o;
      if (myElement instanceof PsiMethod) {
        return wrapper.myElement instanceof PsiMethod &&
               MethodSignatureUtil.areSignaturesEqual((PsiMethod) myElement, (PsiMethod) wrapper.myElement);
      }
      return Comparing.equal(myElement.getName(), wrapper.myElement.getName());
    }

    @Override
    public int hashCode() {
      final String name = myElement.getName();
      if (name != null) {
        return name.hashCode();
      }
      else {
        return 0;
      }
    }
  }

  /** Scans same-package references to non-public members of a public class being moved. */
  private static void findPublicClassConflicts(PsiClass aClass, final MyClassInstanceReferenceVisitor instanceReferenceVisitor) {
    //noinspection MismatchedQueryAndUpdateOfCollection
    NonPublicClassMemberWrappersSet members = new NonPublicClassMemberWrappersSet();
    members.addElements(aClass.getFields());
    members.addElements(aClass.getMethods());
    members.addElements(aClass.getInnerClasses());
    final RefactoringUtil.IsDescendantOf isDescendantOf = new RefactoringUtil.IsDescendantOf(aClass);
    final PsiPackage aPackage = JavaDirectoryService.getInstance().getPackage(aClass.getContainingFile().getContainingDirectory());
    final GlobalSearchScope packageScope = aPackage == null ? aClass.getResolveScope() : PackageScope.packageScopeWithoutLibraries(aPackage, false);
    for (final ClassMemberWrapper memberWrapper : members) {
      ReferencesSearch.search(memberWrapper.getMember(), packageScope, false).forEach(reference -> {
        final PsiElement element = reference.getElement();
        if (element instanceof PsiReferenceExpression) {
          final PsiReferenceExpression expression = (PsiReferenceExpression)element;
          final PsiExpression qualifierExpression = expression.getQualifierExpression();
          if (qualifierExpression != null) {
            final PsiType type = qualifierExpression.getType();
            if (type != null) {
              final PsiClass resolvedTypeClass = PsiUtil.resolveClassInType(type);
              if (isDescendantOf.value(resolvedTypeClass)) {
                instanceReferenceVisitor.visitMemberReference(memberWrapper.getMember(), expression, isDescendantOf);
              }
            }
          }
          else {
            // Unqualified reference — implicit this/same-package access.
            instanceReferenceVisitor.visitMemberReference(memberWrapper.getMember(), expression, isDescendantOf);
          }
        }
        return true;
      });
    }
  }

  /** Feeds the already-found usages of a package-local class through the instance-reference scanner. */
  private static void findInstancesOfPackageLocal(final PsiClass aClass,
                                                  final UsageInfo[] usages,
                                                  final MyClassInstanceReferenceVisitor instanceReferenceVisitor) {
    ClassReferenceScanner referenceScanner = new ClassReferenceScanner(aClass) {
      @Override
      public PsiReference[] findReferences() {
        ArrayList<PsiReference> result = new ArrayList<>();
        for (UsageInfo usage : usages) {
          if (usage instanceof MoveRenameUsageInfo && ((MoveRenameUsageInfo)usage).getReferencedElement() == aClass) {
            final PsiReference reference = usage.getReference();
            if (reference != null) {
              result.add(reference);
            }
          }
        }
        return result.toArray(PsiReference.EMPTY_ARRAY);
      }
    };
    referenceScanner.processReferences(new ClassInstanceScanner(aClass, instanceReferenceVisitor));
  }

  /** Qualified name the element will have after the move, or null for unsupported element kinds. */
  @Nullable
  private String getNewQName(PsiElement element) {
    final String qualifiedName = myTargetPackage.getQualifiedName();
    if (element instanceof PsiClass) {
      return StringUtil.getQualifiedName(qualifiedName, ((PsiClass)element).getName());
    }
    else if (element instanceof PsiPackage) {
      return StringUtil.getQualifiedName(qualifiedName, ((PsiPackage)element).getName());
    }
    else {
      LOG.assertTrue(false);
      return null;
    }
  }

  /** Qualified name the element has before the move, or null for unsupported element kinds. */
  @Nullable
  private String getOldQName(PsiElement element) {
    if (element instanceof PsiClass) {
      return ((PsiClass)element).getQualifiedName();
    }
    else if (element instanceof PsiPackage) {
      return ((PsiPackage)element).getQualifiedName();
    }
    else {
      LOG.assertTrue(false);
      return null;
    }
  }

  @Override
  protected void refreshElements(@NotNull PsiElement[] elements) {
    LOG.assertTrue(elements.length == myElementsToMove.length);
    System.arraycopy(elements, 0, myElementsToMove, 0, elements.length);
  }

  @Override
  protected boolean isPreviewUsages(@NotNull UsageInfo[] usages) {
    if (UsageViewUtil.reportNonRegularUsages(usages, myProject)) {
      return true;
    }
    else {
      return super.isPreviewUsages(usages);
    }
  }

  @Override
  protected void performRefactoring(@NotNull UsageInfo[] usages) {
    // If files are being moved then I need to collect some information to delete these
    // files from CVS. I need to know all common parents of the moved files and relative
    // paths.

    // Move files with correction of references.
    ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
    if (indicator != null) {
      indicator.setIndeterminate(false);
    }
    try {
      // For a class moved with its whole file, value tells whether all classes
      // in that file move together (extension-point decided).
      final Map<PsiClass, Boolean> allClasses = new HashMap<>();
      for (PsiElement element : myElementsToMove) {
        if (element instanceof PsiClass) {
          final PsiClass psiClass = (PsiClass)element;
          if (allClasses.containsKey(psiClass)) {
            continue;
          }
          for (MoveAllClassesInFileHandler fileHandler : Extensions.getExtensions(MoveAllClassesInFileHandler.EP_NAME)) {
            fileHandler.processMoveAllClassesInFile(allClasses, psiClass, myElementsToMove);
          }
        }
      }
      for (PsiElement element : myElementsToMove) {
        if (element instanceof PsiClass) {
          MoveClassesOrPackagesUtil.prepareMoveClass((PsiClass)element);
        }
      }
      final Map<PsiElement, PsiElement> oldToNewElementsMapping = new HashMap<>();
      for (int idx = 0; idx < myElementsToMove.length; idx++) {
        PsiElement element = myElementsToMove[idx];
        final RefactoringElementListener elementListener = getTransaction().getElementListener(element);
        if (element instanceof PsiPackage) {
          final PsiDirectory[] directories = ((PsiPackage)element).getDirectories();
          final PsiPackage newElement = MoveClassesOrPackagesUtil.doMovePackage((PsiPackage)element, myMoveDestination);
          LOG.assertTrue(newElement != null, element);
          oldToNewElementsMapping.put(element, newElement);
          int i = 0;
          final PsiDirectory[] newDirectories = newElement.getDirectories();
          if (newDirectories.length == 1) {//everything is moved in one directory
            for (PsiDirectory directory : directories) {
              oldToNewElementsMapping.put(directory, newDirectories[0]);
            }
          } else {
            for (PsiDirectory directory : directories) {
              if (myMoveDestination.verify(directory) != null) {
                //e.g. directory is excluded so there is no source root for it, hence target directory would be missed from newDirectories
                continue;
              }
              oldToNewElementsMapping.put(directory, newDirectories[i++]);
            }
          }
          element = newElement;
        }
        else if (element instanceof PsiClass) {
          final PsiClass psiClass = (PsiClass)element;
          final PsiClass newElement = MoveClassesOrPackagesUtil.doMoveClass(psiClass, myMoveDestination.getTargetDirectory(element.getContainingFile()), allClasses.get(psiClass));
          oldToNewElementsMapping.put(element, newElement);
          element = newElement;
        } else {
          LOG.error("Unexpected element to move: " + element);
        }
        elementListener.elementMoved(element);
        myElementsToMove[idx] = element;
      }

      myNonCodeUsages = CommonMoveUtil.retargetUsages(usages, oldToNewElementsMapping);
      for (PsiElement element : myElementsToMove) {
        if (element instanceof PsiClass) {
          MoveClassesOrPackagesUtil.finishMoveClass((PsiClass)element);
        }
      }
      if (myOpenInEditor) {
        ApplicationManager.getApplication().invokeLater(() -> EditorHelper.openFilesInEditor(Arrays.stream(myElementsToMove).filter(PsiElement::isValid).toArray(PsiElement[]::new)));
      }
    }
    catch (IncorrectOperationException e) {
      myNonCodeUsages = new NonCodeUsageInfo[0];
      RefactoringUIUtil.processIncorrectOperation(myProject, e);
    }
  }

  @Override
  protected void performPsiSpoilingRefactoring() {
    RenameUtil.renameNonCodeUsages(myProject, myNonCodeUsages);
    if (myMoveCallback != null) {
      if (myMoveCallback instanceof MoveClassesOrPackagesCallback) {
        ((MoveClassesOrPackagesCallback) myMoveCallback).classesOrPackagesMoved(myMoveDestination);
      }
      myMoveCallback.refactoringCompleted();
    }
  }

  @Override
  @NotNull
  protected String getCommandName() {
    String elements = RefactoringUIUtil.calculatePsiElementDescriptionList(myElementsToMove);
    String target = myTargetPackage.getQualifiedName();
    return RefactoringBundle.message("move.classes.command", elements, target);
  }

  /**
   * Collects conflicts for instance references to package-local or protected
   * members that would become inaccessible after the move.
   */
  private class MyClassInstanceReferenceVisitor implements ClassInstanceScanner.ClassInstanceReferenceVisitor {
    private final MultiMap<PsiElement, String> myConflicts;
    // De-duplication: member -> containers already reported for it.
    private final HashMap<PsiModifierListOwner, HashSet<PsiElement>> myReportedElementToContainer = new HashMap<>();
    private final HashMap<PsiClass, RefactoringUtil.IsDescendantOf> myIsDescendantOfCache = new HashMap<>();

    public MyClassInstanceReferenceVisitor(MultiMap<PsiElement, String> conflicts) {
      myConflicts = conflicts;
    }

    @Override
    public void visitQualifier(PsiReferenceExpression qualified,
                               PsiExpression instanceRef,
                               PsiElement referencedInstance) {
      PsiElement resolved = qualified.resolve();
      if (resolved instanceof PsiMember) {
        final PsiMember member = (PsiMember)resolved;
        final PsiClass containingClass = member.getContainingClass();
        RefactoringUtil.IsDescendantOf isDescendantOf = myIsDescendantOfCache.get(containingClass);
        if (isDescendantOf == null) {
          isDescendantOf = new RefactoringUtil.IsDescendantOf(containingClass);
          myIsDescendantOfCache.put(containingClass, isDescendantOf);
        }
        visitMemberReference(member, qualified, isDescendantOf);
      }
    }

    private synchronized void visitMemberReference(final PsiModifierListOwner member, PsiReferenceExpression qualified, final RefactoringUtil.IsDescendantOf descendantOf) {
      if (member.hasModifierProperty(PsiModifier.PACKAGE_LOCAL)) {
        visitPackageLocalMemberReference(qualified, member);
      } else if (member.hasModifierProperty(PsiModifier.PROTECTED)) {
        final PsiExpression qualifier = qualified.getQualifierExpression();
        if (qualifier != null && !(qualifier instanceof PsiThisExpression) && !(qualifier instanceof PsiSuperExpression)) {
          visitPackageLocalMemberReference(qualified, member);
        } else {
          // Protected access via this/super stays legal inside an inheritor.
          if (!isInInheritor(qualified, descendantOf)) {
            visitPackageLocalMemberReference(qualified, member);
          }
        }
      }
    }

    private boolean isInInheritor(PsiReferenceExpression qualified, final RefactoringUtil.IsDescendantOf descendantOf) {
      PsiClass aClass = PsiTreeUtil.getParentOfType(qualified, PsiClass.class);
      while (aClass != null) {
        if (descendantOf.value(aClass)) return true;
        aClass = PsiTreeUtil.getParentOfType(aClass, PsiClass.class);
      }
      return false;
    }

    private void visitPackageLocalMemberReference(PsiJavaCodeReferenceElement qualified, PsiModifierListOwner member) {
      PsiElement container = ConflictsUtil.getContainer(qualified);
      HashSet<PsiElement> reportedContainers = myReportedElementToContainer.computeIfAbsent(member, k -> new HashSet<>());
      // Report each (member, container) pair at most once.
      if (reportedContainers.add(container)) {
        if (!isInsideMoved(container)) {
          PsiFile containingFile = container.getContainingFile();
          if (containingFile != null) {
            PsiDirectory directory = containingFile.getContainingDirectory();
            if (directory != null) {
              PsiPackage aPackage = JavaDirectoryService.getInstance().getPackage(directory);
              if (!myTargetPackage.equalToPackage(aPackage)) {
                String message = RefactoringBundle.message("0.will.be.inaccessible.from.1", RefactoringUIUtil.getDescription(member, true),
                                                           RefactoringUIUtil.getDescription(container, true));
                myConflicts.putValue(member, CommonRefactoringUtil.capitalize(message));
              }
            }
          }
        }
      }
    }

    @Override
    public void visitTypeCast(PsiTypeCastExpression typeCastExpression,
                              PsiExpression instanceRef,
                              PsiElement referencedInstance) {
    }

    @Override
    public void visitReadUsage(PsiExpression instanceRef, PsiType expectedType, PsiElement referencedInstance) {
    }

    @Override
    public void visitWriteUsage(PsiExpression instanceRef, PsiType assignedType, PsiElement referencedInstance) {
    }
  }

  /** Set of wrappers around a class's members that are neither public nor private. */
  private static class NonPublicClassMemberWrappersSet extends HashSet<ClassMemberWrapper> {
    public void addElement(PsiMember member) {
      final PsiNamedElement namedElement = (PsiNamedElement)member;
      if (member.hasModifierProperty(PsiModifier.PUBLIC)) return;
      if (member.hasModifierProperty(PsiModifier.PRIVATE)) return;
      add(new ClassMemberWrapper(namedElement));
    }

    public void addElements(PsiMember[] members) {
      for (PsiMember member : members) {
        addElement(member);
      }
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.omg.CORBA;
//
// IDL:omg.org/CORBA/AliasDef:1.0
//
public class _AliasDefStub extends org.omg.CORBA.portable.ObjectImpl
implements AliasDef
{
private static final String[] _ob_ids_ =
{
"IDL:omg.org/CORBA/AliasDef:1.0",
"IDL:omg.org/CORBA/TypedefDef:1.0",
"IDL:omg.org/CORBA/Contained:1.0",
"IDL:omg.org/CORBA/IRObject:1.0",
"IDL:omg.org/CORBA/IDLType:1.0"
};
/** Repository IDs of all interfaces this stub supports, most derived first. */
public String[]
_ids()
{
    return _ob_ids_;
}
final public static java.lang.Class _ob_opsClass = AliasDefOperations.class;
//
// IDL:omg.org/CORBA/AliasDef/original_type_def:1.0
//
/**
 * Attribute getter: remotely reads {@code original_type_def}, or dispatches
 * directly to the collocated servant when the object is local. The outer loop
 * retries on CORBA remarshal / servant-unavailable conditions.
 */
public IDLType
original_type_def()
{
    while(true)
    {
        if(!this._is_local())
        {
            org.omg.CORBA.portable.OutputStream out = null;
            org.omg.CORBA.portable.InputStream in = null;
            try
            {
                out = _request("_get_original_type_def", true);
                in = _invoke(out);
                IDLType _ob_r = IDLTypeHelper.read(in);
                return _ob_r;
            }
            catch(org.omg.CORBA.portable.RemarshalException _ob_ex)
            {
                // Location-forward etc.: retry the invocation.
                continue;
            }
            catch(org.omg.CORBA.portable.ApplicationException _ob_aex)
            {
                // Attribute reads declare no user exceptions, so any is a protocol error.
                final String _ob_id = _ob_aex.getId();
                throw new org.omg.CORBA.UNKNOWN("Unexpected User Exception: " + _ob_id);
            }
            finally
            {
                _releaseReply(in);
            }
        }
        else
        {
            org.omg.CORBA.portable.ServantObject _ob_so = _servant_preinvoke("original_type_def", _ob_opsClass);
            if(_ob_so == null)
                continue;
            AliasDefOperations _ob_self = (AliasDefOperations)_ob_so.servant;
            try
            {
                return _ob_self.original_type_def();
            }
            finally
            {
                _servant_postinvoke(_ob_so);
            }
        }
    }
}
/**
 * Attribute setter: remotely writes {@code original_type_def}, or dispatches
 * directly to the collocated servant when the object is local; retries on
 * remarshal / servant-unavailable conditions.
 */
public void
original_type_def(IDLType _ob_a)
{
    while(true)
    {
        if(!this._is_local())
        {
            org.omg.CORBA.portable.OutputStream out = null;
            org.omg.CORBA.portable.InputStream in = null;
            try
            {
                out = _request("_set_original_type_def", true);
                IDLTypeHelper.write(out, _ob_a);
                in = _invoke(out);
                return;
            }
            catch(org.omg.CORBA.portable.RemarshalException _ob_ex)
            {
                continue;
            }
            catch(org.omg.CORBA.portable.ApplicationException _ob_aex)
            {
                final String _ob_id = _ob_aex.getId();
                throw new org.omg.CORBA.UNKNOWN("Unexpected User Exception: " + _ob_id);
            }
            finally
            {
                _releaseReply(in);
            }
        }
        else
        {
            org.omg.CORBA.portable.ServantObject _ob_so = _servant_preinvoke("original_type_def", _ob_opsClass);
            if(_ob_so == null)
                continue;
            AliasDefOperations _ob_self = (AliasDefOperations)_ob_so.servant;
            try
            {
                _ob_self.original_type_def(_ob_a);
                return;
            }
            finally
            {
                _servant_postinvoke(_ob_so);
            }
        }
    }
}
//
// IDL:omg.org/CORBA/Contained/id:1.0
//
/**
 * Attribute getter for the contained object's repository {@code id};
 * remote invoke with retry, or direct servant dispatch when collocated.
 */
public String
id()
{
    while(true)
    {
        if(!this._is_local())
        {
            org.omg.CORBA.portable.OutputStream out = null;
            org.omg.CORBA.portable.InputStream in = null;
            try
            {
                out = _request("_get_id", true);
                in = _invoke(out);
                String _ob_r = RepositoryIdHelper.read(in);
                return _ob_r;
            }
            catch(org.omg.CORBA.portable.RemarshalException _ob_ex)
            {
                continue;
            }
            catch(org.omg.CORBA.portable.ApplicationException _ob_aex)
            {
                final String _ob_id = _ob_aex.getId();
                throw new org.omg.CORBA.UNKNOWN("Unexpected User Exception: " + _ob_id);
            }
            finally
            {
                _releaseReply(in);
            }
        }
        else
        {
            org.omg.CORBA.portable.ServantObject _ob_so = _servant_preinvoke("id", _ob_opsClass);
            if(_ob_so == null)
                continue;
            AliasDefOperations _ob_self = (AliasDefOperations)_ob_so.servant;
            try
            {
                return _ob_self.id();
            }
            finally
            {
                _servant_postinvoke(_ob_so);
            }
        }
    }
}
/**
 * Attribute setter for the contained object's repository {@code id};
 * remote invoke with retry, or direct servant dispatch when collocated.
 */
public void
id(String _ob_a)
{
    while(true)
    {
        if(!this._is_local())
        {
            org.omg.CORBA.portable.OutputStream out = null;
            org.omg.CORBA.portable.InputStream in = null;
            try
            {
                out = _request("_set_id", true);
                RepositoryIdHelper.write(out, _ob_a);
                in = _invoke(out);
                return;
            }
            catch(org.omg.CORBA.portable.RemarshalException _ob_ex)
            {
                continue;
            }
            catch(org.omg.CORBA.portable.ApplicationException _ob_aex)
            {
                final String _ob_id = _ob_aex.getId();
                throw new org.omg.CORBA.UNKNOWN("Unexpected User Exception: " + _ob_id);
            }
            finally
            {
                _releaseReply(in);
            }
        }
        else
        {
            org.omg.CORBA.portable.ServantObject _ob_so = _servant_preinvoke("id", _ob_opsClass);
            if(_ob_so == null)
                continue;
            AliasDefOperations _ob_self = (AliasDefOperations)_ob_so.servant;
            try
            {
                _ob_self.id(_ob_a);
                return;
            }
            finally
            {
                _servant_postinvoke(_ob_so);
            }
        }
    }
}
//
// IDL:omg.org/CORBA/Contained/name:1.0
//
/**
 * Attribute getter for the contained object's {@code name};
 * remote invoke with retry, or direct servant dispatch when collocated.
 */
public String
name()
{
    while(true)
    {
        if(!this._is_local())
        {
            org.omg.CORBA.portable.OutputStream out = null;
            org.omg.CORBA.portable.InputStream in = null;
            try
            {
                out = _request("_get_name", true);
                in = _invoke(out);
                String _ob_r = IdentifierHelper.read(in);
                return _ob_r;
            }
            catch(org.omg.CORBA.portable.RemarshalException _ob_ex)
            {
                continue;
            }
            catch(org.omg.CORBA.portable.ApplicationException _ob_aex)
            {
                final String _ob_id = _ob_aex.getId();
                throw new org.omg.CORBA.UNKNOWN("Unexpected User Exception: " + _ob_id);
            }
            finally
            {
                _releaseReply(in);
            }
        }
        else
        {
            org.omg.CORBA.portable.ServantObject _ob_so = _servant_preinvoke("name", _ob_opsClass);
            if(_ob_so == null)
                continue;
            AliasDefOperations _ob_self = (AliasDefOperations)_ob_so.servant;
            try
            {
                return _ob_self.name();
            }
            finally
            {
                _servant_postinvoke(_ob_so);
            }
        }
    }
}
// Writer for the IDL attribute "name" (IDL:omg.org/CORBA/Contained/name:1.0).
public void
name(String _ob_a)
{
    while(true)
    {
        if(!this._is_local())
        {
            org.omg.CORBA.portable.OutputStream out = null;
            org.omg.CORBA.portable.InputStream in = null;
            try
            {
                out = _request("_set_name", true);
                IdentifierHelper.write(out, _ob_a);
                in = _invoke(out);
                return;
            }
            catch(org.omg.CORBA.portable.RemarshalException _ob_ex)
            {
                continue;
            }
            catch(org.omg.CORBA.portable.ApplicationException _ob_aex)
            {
                final String _ob_id = _ob_aex.getId();
                // Fix: capture the reply stream so _releaseReply() frees it,
                // matching describe()/move()/destroy() below.
                in = _ob_aex.getInputStream();
                throw new org.omg.CORBA.UNKNOWN("Unexpected User Exception: " + _ob_id);
            }
            finally
            {
                _releaseReply(in);
            }
        }
        else
        {
            org.omg.CORBA.portable.ServantObject _ob_so = _servant_preinvoke("name", _ob_opsClass);
            if(_ob_so == null)
                continue;
            AliasDefOperations _ob_self = (AliasDefOperations)_ob_so.servant;
            try
            {
                _ob_self.name(_ob_a);
                return;
            }
            finally
            {
                _servant_postinvoke(_ob_so);
            }
        }
    }
}
//
// IDL:omg.org/CORBA/Contained/version:1.0
//
// Reader for the IDL attribute "version" (IDL:omg.org/CORBA/Contained/version:1.0).
public String
version()
{
    while(true)
    {
        if(!this._is_local())
        {
            org.omg.CORBA.portable.OutputStream out = null;
            org.omg.CORBA.portable.InputStream in = null;
            try
            {
                out = _request("_get_version", true);
                in = _invoke(out);
                String _ob_r = VersionSpecHelper.read(in);
                return _ob_r;
            }
            catch(org.omg.CORBA.portable.RemarshalException _ob_ex)
            {
                continue;
            }
            catch(org.omg.CORBA.portable.ApplicationException _ob_aex)
            {
                final String _ob_id = _ob_aex.getId();
                // Fix: capture the reply stream so _releaseReply() frees it,
                // matching describe()/move()/destroy() below.
                in = _ob_aex.getInputStream();
                throw new org.omg.CORBA.UNKNOWN("Unexpected User Exception: " + _ob_id);
            }
            finally
            {
                _releaseReply(in);
            }
        }
        else
        {
            org.omg.CORBA.portable.ServantObject _ob_so = _servant_preinvoke("version", _ob_opsClass);
            if(_ob_so == null)
                continue;
            AliasDefOperations _ob_self = (AliasDefOperations)_ob_so.servant;
            try
            {
                return _ob_self.version();
            }
            finally
            {
                _servant_postinvoke(_ob_so);
            }
        }
    }
}
// Writer for the IDL attribute "version" (IDL:omg.org/CORBA/Contained/version:1.0).
public void
version(String _ob_a)
{
    while(true)
    {
        if(!this._is_local())
        {
            org.omg.CORBA.portable.OutputStream out = null;
            org.omg.CORBA.portable.InputStream in = null;
            try
            {
                out = _request("_set_version", true);
                VersionSpecHelper.write(out, _ob_a);
                in = _invoke(out);
                return;
            }
            catch(org.omg.CORBA.portable.RemarshalException _ob_ex)
            {
                continue;
            }
            catch(org.omg.CORBA.portable.ApplicationException _ob_aex)
            {
                final String _ob_id = _ob_aex.getId();
                // Fix: capture the reply stream so _releaseReply() frees it,
                // matching describe()/move()/destroy() below.
                in = _ob_aex.getInputStream();
                throw new org.omg.CORBA.UNKNOWN("Unexpected User Exception: " + _ob_id);
            }
            finally
            {
                _releaseReply(in);
            }
        }
        else
        {
            org.omg.CORBA.portable.ServantObject _ob_so = _servant_preinvoke("version", _ob_opsClass);
            if(_ob_so == null)
                continue;
            AliasDefOperations _ob_self = (AliasDefOperations)_ob_so.servant;
            try
            {
                _ob_self.version(_ob_a);
                return;
            }
            finally
            {
                _servant_postinvoke(_ob_so);
            }
        }
    }
}
//
// IDL:omg.org/CORBA/Contained/defined_in:1.0
//
// Reader for the IDL attribute "defined_in" (IDL:omg.org/CORBA/Contained/defined_in:1.0).
public Container
defined_in()
{
    while(true)
    {
        if(!this._is_local())
        {
            org.omg.CORBA.portable.OutputStream out = null;
            org.omg.CORBA.portable.InputStream in = null;
            try
            {
                out = _request("_get_defined_in", true);
                in = _invoke(out);
                Container _ob_r = ContainerHelper.read(in);
                return _ob_r;
            }
            catch(org.omg.CORBA.portable.RemarshalException _ob_ex)
            {
                continue;
            }
            catch(org.omg.CORBA.portable.ApplicationException _ob_aex)
            {
                final String _ob_id = _ob_aex.getId();
                // Fix: capture the reply stream so _releaseReply() frees it,
                // matching describe()/move()/destroy() below.
                in = _ob_aex.getInputStream();
                throw new org.omg.CORBA.UNKNOWN("Unexpected User Exception: " + _ob_id);
            }
            finally
            {
                _releaseReply(in);
            }
        }
        else
        {
            org.omg.CORBA.portable.ServantObject _ob_so = _servant_preinvoke("defined_in", _ob_opsClass);
            if(_ob_so == null)
                continue;
            AliasDefOperations _ob_self = (AliasDefOperations)_ob_so.servant;
            try
            {
                return _ob_self.defined_in();
            }
            finally
            {
                _servant_postinvoke(_ob_so);
            }
        }
    }
}
//
// IDL:omg.org/CORBA/Contained/absolute_name:1.0
//
// Reader for the IDL attribute "absolute_name" (IDL:omg.org/CORBA/Contained/absolute_name:1.0).
public String
absolute_name()
{
    while(true)
    {
        if(!this._is_local())
        {
            org.omg.CORBA.portable.OutputStream out = null;
            org.omg.CORBA.portable.InputStream in = null;
            try
            {
                out = _request("_get_absolute_name", true);
                in = _invoke(out);
                String _ob_r = ScopedNameHelper.read(in);
                return _ob_r;
            }
            catch(org.omg.CORBA.portable.RemarshalException _ob_ex)
            {
                continue;
            }
            catch(org.omg.CORBA.portable.ApplicationException _ob_aex)
            {
                final String _ob_id = _ob_aex.getId();
                // Fix: capture the reply stream so _releaseReply() frees it,
                // matching describe()/move()/destroy() below.
                in = _ob_aex.getInputStream();
                throw new org.omg.CORBA.UNKNOWN("Unexpected User Exception: " + _ob_id);
            }
            finally
            {
                _releaseReply(in);
            }
        }
        else
        {
            org.omg.CORBA.portable.ServantObject _ob_so = _servant_preinvoke("absolute_name", _ob_opsClass);
            if(_ob_so == null)
                continue;
            AliasDefOperations _ob_self = (AliasDefOperations)_ob_so.servant;
            try
            {
                return _ob_self.absolute_name();
            }
            finally
            {
                _servant_postinvoke(_ob_so);
            }
        }
    }
}
//
// IDL:omg.org/CORBA/Contained/containing_repository:1.0
//
// Reader for the IDL attribute "containing_repository"
// (IDL:omg.org/CORBA/Contained/containing_repository:1.0).
public Repository
containing_repository()
{
    while(true)
    {
        if(!this._is_local())
        {
            org.omg.CORBA.portable.OutputStream out = null;
            org.omg.CORBA.portable.InputStream in = null;
            try
            {
                out = _request("_get_containing_repository", true);
                in = _invoke(out);
                Repository _ob_r = RepositoryHelper.read(in);
                return _ob_r;
            }
            catch(org.omg.CORBA.portable.RemarshalException _ob_ex)
            {
                continue;
            }
            catch(org.omg.CORBA.portable.ApplicationException _ob_aex)
            {
                final String _ob_id = _ob_aex.getId();
                // Fix: capture the reply stream so _releaseReply() frees it,
                // matching describe()/move()/destroy() below.
                in = _ob_aex.getInputStream();
                throw new org.omg.CORBA.UNKNOWN("Unexpected User Exception: " + _ob_id);
            }
            finally
            {
                _releaseReply(in);
            }
        }
        else
        {
            org.omg.CORBA.portable.ServantObject _ob_so = _servant_preinvoke("containing_repository", _ob_opsClass);
            if(_ob_so == null)
                continue;
            AliasDefOperations _ob_self = (AliasDefOperations)_ob_so.servant;
            try
            {
                return _ob_self.containing_repository();
            }
            finally
            {
                _servant_postinvoke(_ob_so);
            }
        }
    }
}
//
// IDL:omg.org/CORBA/IRObject/def_kind:1.0
//
// Reader for the IDL attribute "def_kind" (IDL:omg.org/CORBA/IRObject/def_kind:1.0).
public DefinitionKind
def_kind()
{
    while(true)
    {
        if(!this._is_local())
        {
            org.omg.CORBA.portable.OutputStream out = null;
            org.omg.CORBA.portable.InputStream in = null;
            try
            {
                out = _request("_get_def_kind", true);
                in = _invoke(out);
                DefinitionKind _ob_r = DefinitionKindHelper.read(in);
                return _ob_r;
            }
            catch(org.omg.CORBA.portable.RemarshalException _ob_ex)
            {
                continue;
            }
            catch(org.omg.CORBA.portable.ApplicationException _ob_aex)
            {
                final String _ob_id = _ob_aex.getId();
                // Fix: capture the reply stream so _releaseReply() frees it,
                // matching describe()/move()/destroy() below.
                in = _ob_aex.getInputStream();
                throw new org.omg.CORBA.UNKNOWN("Unexpected User Exception: " + _ob_id);
            }
            finally
            {
                _releaseReply(in);
            }
        }
        else
        {
            org.omg.CORBA.portable.ServantObject _ob_so = _servant_preinvoke("def_kind", _ob_opsClass);
            if(_ob_so == null)
                continue;
            AliasDefOperations _ob_self = (AliasDefOperations)_ob_so.servant;
            try
            {
                return _ob_self.def_kind();
            }
            finally
            {
                _servant_postinvoke(_ob_so);
            }
        }
    }
}
//
// IDL:omg.org/CORBA/IDLType/type:1.0
//
// Reader for the IDL attribute "type" (IDL:omg.org/CORBA/IDLType/type:1.0).
public org.omg.CORBA.TypeCode
type()
{
    while(true)
    {
        if(!this._is_local())
        {
            org.omg.CORBA.portable.OutputStream out = null;
            org.omg.CORBA.portable.InputStream in = null;
            try
            {
                out = _request("_get_type", true);
                in = _invoke(out);
                org.omg.CORBA.TypeCode _ob_r = in.read_TypeCode();
                return _ob_r;
            }
            catch(org.omg.CORBA.portable.RemarshalException _ob_ex)
            {
                continue;
            }
            catch(org.omg.CORBA.portable.ApplicationException _ob_aex)
            {
                final String _ob_id = _ob_aex.getId();
                // Fix: capture the reply stream so _releaseReply() frees it,
                // matching describe()/move()/destroy() below.
                in = _ob_aex.getInputStream();
                throw new org.omg.CORBA.UNKNOWN("Unexpected User Exception: " + _ob_id);
            }
            finally
            {
                _releaseReply(in);
            }
        }
        else
        {
            org.omg.CORBA.portable.ServantObject _ob_so = _servant_preinvoke("type", _ob_opsClass);
            if(_ob_so == null)
                continue;
            AliasDefOperations _ob_self = (AliasDefOperations)_ob_so.servant;
            try
            {
                return _ob_self.type();
            }
            finally
            {
                _servant_postinvoke(_ob_so);
            }
        }
    }
}
//
// IDL:omg.org/CORBA/Contained/describe:1.0
//
// Invokes the IDL operation "describe" (IDL:omg.org/CORBA/Contained/describe:1.0).
// Remote case marshals via GIOP and unmarshals the Description from the
// reply; local case dispatches directly to the colocated servant.
// The outer loop retries the call transparently on RemarshalException.
public org.omg.CORBA.ContainedPackage.Description
describe()
{
while(true)
{
if(!this._is_local())
{
org.omg.CORBA.portable.OutputStream out = null;
org.omg.CORBA.portable.InputStream in = null;
try
{
out = _request("describe", true);
in = _invoke(out);
org.omg.CORBA.ContainedPackage.Description _ob_r = org.omg.CORBA.ContainedPackage.DescriptionHelper.read(in);
return _ob_r;
}
catch(org.omg.CORBA.portable.RemarshalException _ob_ex)
{
// ORB requested retransmission (e.g. after a location forward).
continue;
}
catch(org.omg.CORBA.portable.ApplicationException _ob_aex)
{
final String _ob_id = _ob_aex.getId();
// This operation declares no user exceptions, so any ApplicationException
// is unexpected; capture its stream so the finally clause releases it.
in = _ob_aex.getInputStream();
throw new org.omg.CORBA.UNKNOWN("Unexpected User Exception: " + _ob_id);
}
finally
{
_releaseReply(in);
}
}
else
{
org.omg.CORBA.portable.ServantObject _ob_so = _servant_preinvoke("describe", _ob_opsClass);
if(_ob_so == null)
// Servant no longer available locally; retry (may go remote).
continue;
AliasDefOperations _ob_self = (AliasDefOperations)_ob_so.servant;
try
{
return _ob_self.describe();
}
finally
{
_servant_postinvoke(_ob_so);
}
}
}
}
//
// IDL:omg.org/CORBA/Contained/move:1.0
//
// Invokes the IDL operation "move" (IDL:omg.org/CORBA/Contained/move:1.0):
// relocates this definition into container _ob_a0 under identifier _ob_a1
// and version _ob_a2. Retries transparently on RemarshalException.
public void
move(Container _ob_a0,
String _ob_a1,
String _ob_a2)
{
while(true)
{
if(!this._is_local())
{
org.omg.CORBA.portable.OutputStream out = null;
org.omg.CORBA.portable.InputStream in = null;
try
{
out = _request("move", true);
ContainerHelper.write(out, _ob_a0);
IdentifierHelper.write(out, _ob_a1);
VersionSpecHelper.write(out, _ob_a2);
in = _invoke(out);
return;
}
catch(org.omg.CORBA.portable.RemarshalException _ob_ex)
{
// ORB requested retransmission (e.g. after a location forward).
continue;
}
catch(org.omg.CORBA.portable.ApplicationException _ob_aex)
{
final String _ob_id = _ob_aex.getId();
// No user exceptions declared; capture the stream so finally releases it.
in = _ob_aex.getInputStream();
throw new org.omg.CORBA.UNKNOWN("Unexpected User Exception: " + _ob_id);
}
finally
{
_releaseReply(in);
}
}
else
{
org.omg.CORBA.portable.ServantObject _ob_so = _servant_preinvoke("move", _ob_opsClass);
if(_ob_so == null)
// Servant no longer available locally; retry (may go remote).
continue;
AliasDefOperations _ob_self = (AliasDefOperations)_ob_so.servant;
try
{
_ob_self.move(_ob_a0, _ob_a1, _ob_a2);
return;
}
finally
{
_servant_postinvoke(_ob_so);
}
}
}
}
//
// IDL:omg.org/CORBA/IRObject/destroy:1.0
//
// Invokes the IDL operation "destroy" (IDL:omg.org/CORBA/IRObject/destroy:1.0),
// removing this definition from the interface repository.
// Retries transparently on RemarshalException.
public void
destroy()
{
while(true)
{
if(!this._is_local())
{
org.omg.CORBA.portable.OutputStream out = null;
org.omg.CORBA.portable.InputStream in = null;
try
{
out = _request("destroy", true);
in = _invoke(out);
return;
}
catch(org.omg.CORBA.portable.RemarshalException _ob_ex)
{
// ORB requested retransmission (e.g. after a location forward).
continue;
}
catch(org.omg.CORBA.portable.ApplicationException _ob_aex)
{
final String _ob_id = _ob_aex.getId();
// No user exceptions declared; capture the stream so finally releases it.
in = _ob_aex.getInputStream();
throw new org.omg.CORBA.UNKNOWN("Unexpected User Exception: " + _ob_id);
}
finally
{
_releaseReply(in);
}
}
else
{
org.omg.CORBA.portable.ServantObject _ob_so = _servant_preinvoke("destroy", _ob_opsClass);
if(_ob_so == null)
// Servant no longer available locally; retry (may go remote).
continue;
AliasDefOperations _ob_self = (AliasDefOperations)_ob_so.servant;
try
{
_ob_self.destroy();
return;
}
finally
{
_servant_postinvoke(_ob_so);
}
}
}
}
}
| |
/*
* Copyright 2015 Manish Ahluwalia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.manishahluwalia.gwt.rpcwrapper.client;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.google.gwt.http.client.Request;
import com.google.gwt.http.client.RequestBuilder;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.http.client.RequestException;
import com.google.gwt.http.client.Response;
import com.google.gwt.user.client.rpc.InvocationException;
import com.google.gwt.user.client.rpc.RpcRequestBuilder;
/**
* We want most of our GWT-RPC calls to have the following characteristics:
* <ol>
* <li>CSRF safe</li>
* <li>Restart after logging us back in if the server restarted and lost our
* login session</li>
* <li>Do all of this transparently to the calling client code and to the called
* server code</li>
* </ol>
* This class helps us do all of the above.
* @param <T>
*/
public class WrappedRpcBuilder<T> extends RpcRequestBuilder
{
    private static final Logger logger = Logger.getLogger(WrappedRpcBuilder.class.getName());

    // Supplies the listener that decides how each request/response is handled
    // (re-send, pass through, replace with an error, ...).
    private final RpcWrapper<T> wrapper;

    public WrappedRpcBuilder(RpcWrapper<T> wrapper) {
        this.wrapper = wrapper;
        logger.finest("Creating new " + this.getClass().getName() + " " + this);
    }

    /**
     * Callback wrapper that consults the {@link RpcWrapper}'s listener before
     * delivering a response or error to the real callback, and that can
     * transparently re-send the original request.
     */
    public class RepeatingCallback implements RequestCallback
    {
        private final RepeatingRequestBuilder requestBuilder;
        private final RequestCallback realCallback;
        private String rpcName;

        public RepeatingCallback (RepeatingRequestBuilder rb, RequestCallback rc)
        {
            logger.finest("Creating new RepeatingCallback");
            this.requestBuilder = rb;
            this.realCallback = rc;
        }

        public void setRpcName(String rpcName)
        {
            this.rpcName = rpcName;
        }

        //@Override
        public void onResponseReceived (final Request request,
        final Response response)
        {
            logger.finest("Got a response");
            wrapper.getListener().onBeforeResponseProcessing(rpcName, request, response, requestBuilder.getOpaqueRequestInfoHolder(), new ProceedCallback<ResponseDisposition>() {
                //@Override
                public void proceed(ResponseDisposition disposition) {
                    switch (disposition) {
                    case PROCESS:
                        // Deliver the response unchanged.
                        logger.finest("Told to process response");
                        realCallback.onResponseReceived(request, response);
                        break;
                    case REPEAT:
                        // Re-send the original request (e.g. after re-login).
                        logger.finest("Told to repeat response");
                        try {
                            requestBuilder.reSend(RepeatingCallback.this);
                        } catch (RequestException e) {
                            logger.log(Level.SEVERE, "Got an error while re-sending request", e);
                            realCallback.onError(request, new InvocationException("Got an error while re-sending request"));
                        }
                        break;
                    case RETURN_EXCEPTION:
                        // Surface the listener-chosen exception instead.
                        logger.finest("Told to throw an error response");
                        Throwable exception = disposition.getException();
                        assert null!=exception : "For disposition " + disposition + " exception cannot be null";
                        realCallback.onError(request, exception);
                        break;
                    }
                    return;
                }
            });
        }

        //@Override
        public void onError (final Request request, final Throwable exception)
        {
            logger.finest("Got an error response");
            wrapper.getListener().onBeforeErrorProcessing(request, exception, requestBuilder.getOpaqueRequestInfoHolder(), new ProceedCallback<ErrorDisposition>() {
                //@Override
                public void proceed(ErrorDisposition disposition) {
                    switch (disposition) {
                    case PROCESS:
                        // Deliver the original error unchanged.
                        logger.finest("Told to process error response");
                        realCallback.onError(request, exception);
                        break;
                    case REPEAT:
                        // Re-send the original request.
                        try {
                            logger.finest("Told to repeat call on error response");
                            requestBuilder.reSend(RepeatingCallback.this);
                        } catch (RequestException e) {
                            logger.log(Level.SEVERE, "Got an error while re-sending request", e);
                            realCallback.onError(request, new InvocationException("Got an error while re-sending request"));
                        }
                        break;
                    case REPLACE_ERROR:
                        // Deliver the listener-chosen exception instead.
                        logger.finest("Told to replace error response");
                        Throwable exception = disposition.getException();
                        assert null!=exception : "For disposition " + disposition + " exception cannot be null";
                        realCallback.onError(request, exception);
                        break;
                    }
                }
            });
        }
    }

    /**
     * RequestBuilder that remembers its serialized payload so a request can be
     * re-sent verbatim, notifying the wrapper's listener around each send.
     */
    public class RepeatingRequestBuilder extends RequestBuilder
    {
        public RepeatingRequestBuilder (RequestBuilder rb)
        {
            // NOTE(review): only the HTTP method and URL are copied from rb;
            // headers/timeout configured on the source builder are not —
            // confirm this is acceptable for all RPCs routed through here.
            super(rb.getHTTPMethod(), rb.getUrl());
            logger.finest("Creating new Request builder");
            this.opaqueRequestInfoHolder = wrapper.getListener().getNewOpaqueRequestInfoHolder();
        }

        // Serialized GWT-RPC payload, kept for possible re-sends.
        private String requestData;
        // RPC name ("RemoteServiceInterface.method") parsed from the payload.
        private String rpcName = null;
        // Listener-owned state that travels with this request.
        private T opaqueRequestInfoHolder;

        /** Re-sends the remembered payload after notifying the listener. */
        public void reSend (RequestCallback rc) throws RequestException
        {
            logger.finest("reSend()");
            wrapper.getListener().onBeforeRequestResend(opaqueRequestInfoHolder);
            sendRequest(requestData, rc);
        }

        @Override
        public Request send () throws RequestException
        {
            logger.finest("send()");
            wrapper.getListener().onBeforeRequestSend(this, opaqueRequestInfoHolder);
            return super.send();
        }

        @Override
        public Request sendRequest (String rd, RequestCallback rc) throws RequestException
        {
            logger.finest("sendRequest()");
            wrapper.getListener().onBeforeRequestSend(this, opaqueRequestInfoHolder);
            requestData = rd;
            @SuppressWarnings("unchecked")
            RepeatingCallback rrc = (WrappedRpcBuilder<T>.RepeatingCallback) rc;
            rrc.setRpcName(rpcName);
            return super.sendRequest(rd, rc);
        }

        @Override
        public void setRequestData (String s)
        {
            logger.finest("setRequestData()");
            requestData = s;
            super.setRequestData(s);
            this.rpcName = getRpcName(s);
            wrapper.getListener().onRequestDataSet(opaqueRequestInfoHolder, rpcName);
        }

        @Override
        public void setCallback(RequestCallback rc) {
            logger.finest("setCallback()");
            @SuppressWarnings("unchecked")
            RepeatingCallback rrc = (WrappedRpcBuilder<T>.RepeatingCallback) rc;
            rrc.setRpcName(rpcName);
            super.setCallback(rc);
        }

        /**
         * Given the serialized request data for an RPC, this routine constructs
         * the RPC name (RemoteServiceInterface.method).
         * <p/>
         * This is done in javascript because JS and GWT Java split functions
         * behave differently.
         * <p/>
         * This is sensitive to the actual _internal_ implementation of GWT and
         * must be updated if the GWT version changes.
         */
        private native String getRpcName (String requestData)
        /*-{
            // Fix: declare with 'var' — the original assigned an undeclared
            // identifier, creating an accidental JS global (and a runtime
            // error under strict mode).
            var parts = requestData.split("|", 7);
            return parts[5]+"."+parts[6];
        }-*/;

        public T getOpaqueRequestInfoHolder ()
        {
            return opaqueRequestInfoHolder;
        }
    }

    @Override
    public RequestBuilder doCreate (String s)
    {
        logger.finest("doCreate()");
        return new RepeatingRequestBuilder(super.doCreate(s));
    }

    @Override
    public void doFinish (RequestBuilder rb)
    {
        logger.finest("doFinish()");
        super.doFinish(rb);
    }

    @Override
    public void doSetCallback (RequestBuilder rb, RequestCallback rc)
    {
        logger.finest("doSetCallback()");
        @SuppressWarnings("unchecked")
        RepeatingRequestBuilder rrb = (WrappedRpcBuilder<T>.RepeatingRequestBuilder) rb;
        super.doSetCallback(rrb, new RepeatingCallback(rrb, rc));
    }
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package utils;
import java.math.RoundingMode;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
/**
* This class provides some basic mathematical functionality.
*
* @author Haitham Seada
*/
public class Mathematics {

    /**
     * Default precision: two doubles closer than EPSILON compare as equal.
     */
    public static final double EPSILON = 1e-10;//1e-100;

    /**
     * Rounds {@code x} to the number of decimal places implied by
     * {@link #EPSILON}. Values equal to zero (within EPSILON) are returned
     * unchanged.
     */
    public static double approximate(double x) {
        int decimalPlaces = 0;
        if (Mathematics.compare(x, 0) != 0) {
            decimalPlaces = getPrecisionDecimalPlacesCount();
        }
        return approximate(x, decimalPlaces);
    }

    /**
     * @return the number of decimal places implied by {@link #EPSILON}
     *         (e.g. 1e-10 gives 10)
     */
    public static int getPrecisionDecimalPlacesCount() {
        int decimalPlaces = 0;
        double tempEpsilon = EPSILON;
        while (Math.abs(tempEpsilon) < 1) {
            tempEpsilon *= 10;
            decimalPlaces++;
        }
        return decimalPlaces;
    }

    /**
     * @return the number of decimal places implied by the given epsilon
     */
    public static int getPrecisionDecimalPlacesCount(double epsilon) {
        int decimalPlaces = 0;
        while (Math.abs(epsilon) < 1) {
            epsilon *= 10;
            decimalPlaces++;
        }
        return decimalPlaces;
    }

    /**
     * Rounds {@code x} toward positive infinity (CEILING) to at most
     * {@code decimalPlaces} decimal places; 0 decimal places returns x as-is.
     * <p>
     * Fix: the formatter is built with ROOT-locale symbols. The previous code
     * used the default locale, so in locales whose decimal separator is ','
     * the formatted text could not be parsed back by Double.parseDouble and a
     * NumberFormatException was thrown.
     */
    public static double approximate(double x, int decimalPlaces) {
        if (decimalPlaces == 0) {
            return x;
        }
        StringBuilder pattern = new StringBuilder("#.");
        for (int i = 0; i < decimalPlaces; i++) {
            pattern.append('#');
        }
        DecimalFormat df = new DecimalFormat(pattern.toString(),
                DecimalFormatSymbols.getInstance(Locale.ROOT));
        df.setRoundingMode(RoundingMode.CEILING);
        return Double.parseDouble(df.format(x));
    }

    /**
     * Binomial coefficient C(n, k), computed multiplicatively in floating
     * point and rounded to the nearest integer.
     */
    public static int nchoosek(int n, int k) {
        int i;
        double prod;
        prod = 1.0;
        if (n == 0 && k == 0) {
            return 1;
        } else {
            for (i = 1; i <= k; i++) {
                prod = prod * (double) ((double) (n + 1 - i) / (double) i);
            }
            // +0.5 rounds the accumulated floating-point product to nearest.
            return (int) (prod + 0.5);
        }
    }

    /**
     * Compares two doubles using the default {@link #EPSILON} tolerance.
     *
     * @return 0 if the values are within EPSILON of each other,
     *         1 if num1 &gt; num2, -1 otherwise
     */
    public static int compare(double num1, double num2) {
        return compare(num1, num2, EPSILON);
    }

    /**
     * Compares two doubles using an explicit tolerance.
     *
     * @return 0 if |num1 - num2| &lt; delta, 1 if num1 &gt; num2, -1 otherwise
     */
    public static int compare(double num1, double num2, double delta) {
        if (Math.abs(num1 - num2) < delta) {
            return 0;
        }
        if (num1 > num2) {
            return 1;
        } else {
            return -1;
        }
    }

    /**
     * Solves A*x = b using Gaussian elimination with partial pivoting.
     * Note: both A and b are modified in place.
     *
     * @throws SingularMatrixException if a pivot is not larger than EPSILON
     */
    // Gaussian elimination with partial pivoting
    public static double[] gaussianElimination(double[][] A, double[] b)
            throws
            SingularMatrixException {
        int N = b.length;
        for (int p = 0; p < N; p++) {
            // find pivot row and swap
            int max = p;
            for (int i = p + 1; i < N; i++) {
                if (Math.abs(A[i][p]) > Math.abs(A[max][p])) {
                    max = i;
                }
            }
            double[] temp = A[p];
            A[p] = A[max];
            A[max] = temp;
            double t = b[p];
            b[p] = b[max];
            b[max] = t;
            // singular or nearly singular
            if (Math.abs(A[p][p]) <= EPSILON) {
                throw new SingularMatrixException();
            }
            // pivot within A and b
            for (int i = p + 1; i < N; i++) {
                double alpha = A[i][p] / A[p][p];
                b[i] -= alpha * b[p];
                for (int j = p; j < N; j++) {
                    A[i][j] -= alpha * A[p][j];
                }
            }
        }
        // back substitution
        double[] x = new double[N];
        for (int i = N - 1; i >= 0; i--) {
            double sum = 0.0;
            for (int j = i + 1; j < N; j++) {
                sum += A[i][j] * x[j];
            }
            x[i] = (b[i] - sum) / A[i][i];
        }
        return x;
    }

    /** Arithmetic mean of all values (NaN for an empty array, from 0/0). */
    static double getAverage(double[] values) {
        double sum = 0;
        for (int i = 0; i < values.length; i++) {
            sum += values[i];
        }
        return sum / values.length;
    }

    /**
     * Returns a copy of {@code arr} in which entries smaller than
     * 10^-decimalPlaces are replaced by 0.
     * NOTE(review): the threshold test is signed, so every negative entry is
     * zeroed as well — confirm that callers only pass non-negative data.
     */
    public static double[] getApproximateCopy(double[] arr, int decimalPlaces) {
        double[] arrCopy = new double[arr.length];
        for (int i = 0; i < arr.length; i++) {
            if (arr[i] < Math.pow(10, -1 * decimalPlaces)) {
                arrCopy[i] = 0;
            } else {
                arrCopy[i] = arr[i];
            }
        }
        return arrCopy;
    }

    /** Vector from point p1 to point p2 (p2 - p1), component-wise. */
    public static double[] getVector(double[] p1, double[] p2) {
        double[] v = new double[p1.length];
        for (int i = 0; i < p1.length; i++) {
            v[i] = p2[i] - p1[i];
        }
        return v;
    }

    /**
     * Thrown by {@link #gaussianElimination} when the coefficient matrix is
     * singular or nearly singular.
     */
    public static class SingularMatrixException extends Exception {

        private static final long serialVersionUID = 1L;

        public SingularMatrixException() {
            super("Matrix is singular or nearly singular");
        }

        public SingularMatrixException(String message) {
            super(message);
        }

        public String toString() {
            return getMessage();
        }
    }

    /**
     * Mean of the non-negative entries only, or -1 if there are none.
     */
    public static double getNonNegativesAverage(double[] arr) {
        double average = 0.0;
        int count = 0;
        for (double num : arr) {
            if (num >= 0) {
                average += num;
                count++;
            }
        }
        if (count == 0) {
            return -1;
        }
        return average / count;
    }

    /** int[] overload of {@link #getNonNegativesAverage(double[])}. */
    public static double getNonNegativesAverage(int[] arr) {
        double[] dArr = new double[arr.length];
        for (int i = 0; i < arr.length; i++) {
            dArr[i] = arr[i];
        }
        return getNonNegativesAverage(dArr);
    }

    /**
     * Index of the smallest non-NaN value, or -1 if every entry is NaN
     * (or the array is empty).
     */
    public static int getMinIndex(double[] arr) {
        int counter = 0;
        while (counter < arr.length && Double.isNaN(arr[counter])) {
            counter++;
        }
        if (counter == arr.length) {
            // All array elements are NANs
            return -1;
        } else {
            int index = counter++;
            while (counter < arr.length) {
                if (arr[counter] < arr[index]) {
                    index = counter;
                }
                counter++;
            }
            return index;
        }
    }

    /** int[] overload of {@link #getMinIndex(double[])}. */
    public static int getMinIndex(int[] arr) {
        double[] dArr = new double[arr.length];
        for (int i = 0; i < arr.length; i++) {
            dArr[i] = arr[i];
        }
        return getMinIndex(dArr);
    }

    /**
     * Index of the largest non-NaN value, or -1 if every entry is NaN
     * (or the array is empty).
     */
    public static int getMaxIndex(double[] arr) {
        int counter = 0;
        while (counter < arr.length && Double.isNaN(arr[counter])) {
            counter++;
        }
        if (counter == arr.length) {
            // All array elements are NANs
            return -1;
        } else {
            int index = counter++;
            while (counter < arr.length) {
                if (arr[counter] > arr[index]) {
                    index = counter;
                }
                counter++;
            }
            return index;
        }
    }

    /** int[] overload of {@link #getMaxIndex(double[])}. */
    public static int getMaxIndex(int[] arr) {
        double[] dArr = new double[arr.length];
        for (int i = 0; i < arr.length; i++) {
            dArr[i] = arr[i];
        }
        return getMaxIndex(dArr);
    }

    /**
     * Index (into the original array) of the median of the non-NaN values;
     * for an even count the upper-middle element is chosen. Returns -1 if
     * there are no non-NaN values.
     */
    public static int getMedianIndex(double[] arr) {
        List<IndexValuePair> indexValuePairs = new ArrayList<>();
        for (int i = 0; i < arr.length; i++) {
            if (Double.isNaN(arr[i])) {
                // Ignore NaN values
                continue;
            }
            indexValuePairs.add(new IndexValuePair(i, arr[i]));
        }
        if (indexValuePairs.isEmpty()) {
            return -1;
        }
        Collections.sort(indexValuePairs);
        return indexValuePairs.get(indexValuePairs.size() / 2).index;
    }

    /** int[] overload of {@link #getMedianIndex(double[])}. */
    public static int getMedianIndex(int[] arr) {
        double[] dArr = new double[arr.length];
        for (int i = 0; i < arr.length; i++) {
            dArr[i] = arr[i];
        }
        return getMedianIndex(dArr);
    }

    /**
     * Like {@link #getMedianIndex(double[])} but restricted to non-negative
     * values; returns -1 if there are none.
     */
    public static int getNonNegativesMedianIndex(double[] arr) {
        List<IndexValuePair> indexValuePairs = new ArrayList<>();
        for (int i = 0; i < arr.length; i++) {
            if (Double.isNaN(arr[i])) {
                // Ignore NaN values
                continue;
            }
            if (arr[i] >= 0) {
                // Consider only non-negative values (i.e. exclude -ve values
                // and consider only Zero and positive values)
                indexValuePairs.add(new IndexValuePair(i, arr[i]));
            }
        }
        if (indexValuePairs.isEmpty()) {
            return -1;
        }
        Collections.sort(indexValuePairs);
        return indexValuePairs.get(indexValuePairs.size() / 2).index;
    }

    /** int[] overload of {@link #getNonNegativesMedianIndex(double[])}. */
    public static int getNonNegativesMedianIndex(int[] arr) {
        double[] dArr = new double[arr.length];
        for (int i = 0; i < arr.length; i++) {
            dArr[i] = arr[i];
        }
        return getNonNegativesMedianIndex(dArr);
    }

    /** Smallest non-NaN value, or NaN if none exists. */
    public static double getMin(double[] arr) {
        int index = getMinIndex(arr);
        if (index == -1) {
            return Double.NaN;
        } else {
            return arr[index];
        }
    }

    /** Smallest value. NOTE(review): assumes a non-empty array. */
    public static int getMin(int[] arr) {
        return arr[getMinIndex(arr)];
    }

    /** Largest non-NaN value, or NaN if none exists. */
    public static double getMax(double[] arr) {
        int index = getMaxIndex(arr);
        if (index == -1) {
            return Double.NaN;
        } else {
            return arr[index];
        }
    }

    /** Largest value. NOTE(review): assumes a non-empty array. */
    public static int getMax(int[] arr) {
        return arr[getMaxIndex(arr)];
    }

    /** Median of the non-NaN values, or NaN if none exists. */
    public static double getMedian(double[] arr) {
        int index = getMedianIndex(arr);
        if (index == -1) {
            return Double.NaN;
        } else {
            return arr[index];
        }
    }

    /** Median value. NOTE(review): assumes a non-empty array. */
    public static int getMedian(int[] arr) {
        return arr[getMedianIndex(arr)];
    }

    /** Median of the non-negative values, or NaN if none exists. */
    public static double getNonNegativesMedian(double[] arr) {
        int index = getNonNegativesMedianIndex(arr);
        if (index == -1) {
            return Double.NaN;
        } else {
            return arr[index];
        }
    }

    /**
     * Median of the non-negative values.
     * NOTE(review): assumes at least one non-negative entry exists.
     */
    public static int getNonNegativesMedian(int[] arr) {
        return arr[getNonNegativesMedianIndex(arr)];
    }

    /** Population standard deviation (divides by N, not N-1). */
    public static double getStandardDeviation(double[] values) {
        double mean = 0.0;
        for (int i = 0; i < values.length; i++) {
            mean += values[i];
        }
        mean /= values.length;
        return getStandardDeviation(values, mean);
    }

    /** int[] overload of {@link #getStandardDeviation(double[])}. */
    public static double getStandardDeviation(int[] values) {
        double[] dArr = new double[values.length];
        for (int i = 0; i < values.length; i++) {
            dArr[i] = values[i];
        }
        return getStandardDeviation(dArr);
    }

    /** Population standard deviation around a precomputed mean. */
    public static double getStandardDeviation(double[] values, double mean) {
        double stdDev = 0.0;
        for (int i = 0; i < values.length; i++) {
            stdDev += Math.pow(values[i] - mean, 2);
        }
        stdDev /= values.length;
        stdDev = Math.sqrt(stdDev);
        return stdDev;
    }

    /**
     * An (index, value) pair ordered by value, using a very tight (1e-100)
     * equality tolerance so ordering is effectively exact.
     */
    public static class IndexValuePair implements Comparable<IndexValuePair> {

        private int index;
        private double value;

        public IndexValuePair(int index, double value) {
            this.index = index;
            this.value = value;
        }

        /**
         * @return the index
         */
        public int getIndex() {
            return index;
        }

        /**
         * @param index the index to set
         */
        public void setIndex(int index) {
            this.index = index;
        }

        /**
         * @return the value
         */
        public double getValue() {
            return value;
        }

        /**
         * @param value the value to set
         */
        public void setValue(double value) {
            this.value = value;
        }

        @Override
        public int compareTo(IndexValuePair indexValuePair) {
            return compare(this.value, indexValuePair.value, 1e-100);
        }
    }

    /**
     * Normalizes v to unit length. Zero components are replaced by 1e-10
     * first, so a zero vector does not produce a division by zero.
     */
    public static double[] getUnitVector(double[] v) {
        double[] weightVector = new double[v.length];
        for (int j = 0; j < v.length; j++) {
            weightVector[j] = v[j] == 0 ? 0.0000000001 : v[j];
        }
        double norm = 0;
        for (int j = 0; j < weightVector.length; j++) {
            norm += Math.pow(weightVector[j], 2);
        }
        norm = Math.sqrt(norm);
        for (int j = 0; j < weightVector.length; j++) {
            weightVector[j] /= norm;
        }
        return weightVector;
    }

    /** Euclidean (L2) distance between two points of equal dimension. */
    public static double getEuclideanDistance(
            double[] v1,
            double[] v2) {
        double powerSum = 0;
        for (int i = 0; i < v1.length; i++) {
            powerSum += Math.pow(v1[i] - v2[i], 2);
        }
        return Math.sqrt(powerSum);
    }

    /** Dot (inner) product of two vectors of equal dimension. */
    public static double getDotProduct(double[] v1, double[] v2) {
        double d = 0.0;
        for (int i = 0; i < v1.length; i++) {
            d += v1[i] * v2[i];
        }
        return d;
    }

    /** Euclidean (L2) norm of a vector. */
    public static double getNorm(double[] v) {
        double refDirNorm;
        refDirNorm = 0.0; // Eventually will be the NORM of the direction
        for (int k = 0; k < v.length; k++) {
            refDirNorm += Math.pow(v[k], 2);
        }
        refDirNorm = Math.sqrt(refDirNorm); // After this line refDirNorm will
        // be the NORM of the reference direction
        return refDirNorm;
    }

    /** Scalar multiple s*v as a new vector. */
    public static double[] multiply(double s, double[] v) {
        double[] sv = new double[v.length];
        for (int i = 0; i < v.length; i++) {
            sv[i] = s * v[i];
        }
        return sv;
    }

    /** Adds the scalar s to every component of v (new vector). */
    public static double[] add(double s, double[] v) {
        double[] sv = new double[v.length];
        for (int i = 0; i < v.length; i++) {
            sv[i] = s + v[i];
        }
        return sv;
    }

    /** Component-wise sum v1 + v2 (new vector). */
    public static double[] add(double[] v1, double[] v2) {
        double[] sum = new double[v1.length];
        for (int i = 0; i < v1.length; i++) {
            sum[i] = v1[i] + v2[i];
        }
        return sum;
    }

    /**
     * Get the perpendicular distance from point v to vector u
     *
     * @param v point
     * @param u direction vector
     * @return distance from v to its projection onto u
     */
    public static double getPerpendicularDistance(double[] v, double[] u) {
        double[] proj = getProjection(v, u);
        return getDistance(proj, v);
    }

    /** Vector projection of v onto the direction of u. */
    public static double[] getProjection(double[] v, double[] u) {
        double scalarProjection = getScalarProjection(v, u);
        return Mathematics.multiply(
                scalarProjection,
                Mathematics.getUnitVector(u));
    }

    /** Scalar projection of v onto u: (v . u) / |u|. */
    public static double getScalarProjection(double[] v, double[] u) {
        double dot = Mathematics.getDotProduct(v, u);
        double refDirNorm = Mathematics.getNorm(u);
        return dot / refDirNorm;
    }

    /** Euclidean distance computed as |v1 - v2|. */
    public static double getDistance(double[] v1, double[] v2) {
        return getNorm(add(v1, multiply(-1, v2)));
    }

    /** Median of each row of the matrix, as a vector. */
    public static double[] medianEachRow(double[][] matrix) {
        double[] arr = new double[matrix.length];
        for (int i = 0; i < matrix.length; i++) {
            arr[i] = Mathematics.getMedian(matrix[i]);
        }
        return arr;
    }

    /** Mean of each row of the matrix, as a vector. */
    public static double[] meanEachRow(double[][] matrix) {
        double[] arr = new double[matrix.length];
        for (int i = 0; i < matrix.length; i++) {
            arr[i] = Mathematics.getAverage(matrix[i]);
        }
        return arr;
    }
}
| |
/**
* Licensed to the Sakai Foundation (SF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The SF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.sakaiproject.nakamura.batch;
import org.apache.sling.api.SlingException;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ResourceNotFoundException;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.commons.json.JSONArray;
import org.apache.sling.commons.json.JSONException;
import org.apache.sling.commons.json.JSONObject;
import org.apache.sling.commons.json.io.JSONWriter;
import org.sakaiproject.nakamura.api.lite.authorizable.User;
import org.sakaiproject.nakamura.util.RequestInfo;
import org.sakaiproject.nakamura.util.RequestWrapper;
import org.sakaiproject.nakamura.util.ResponseWrapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.List;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletResponse;
public class BatchHelper {
private static final Logger LOGGER = LoggerFactory
.getLogger(BatchHelper.class);
protected static final String REQUESTS_PARAMETER = "requests";
/**
* Takes the original request and starts the batching.
*
* @param request
* @param response
* @throws IOException
* @throws ServletException
*/
/**
 * Parses a batched JSON request description and dispatches each contained
 * sub-request, streaming all individual results back in one JSON object of
 * the form {"results": [...]}.
 *
 * @param request the outer (real) Sling request
 * @param response the outer (real) Sling response
 * @param jsonRequest a JSON array string; each element describes one
 *        sub-request (parsed into a RequestInfo)
 * @param allowModify when false, non-safe (modifying) sub-requests are
 *        silently dropped rather than executed
 * @throws IOException if writing the response fails
 * @throws ServletException propagated from sub-request dispatching
 */
protected void batchRequest(SlingHttpServletRequest request,
SlingHttpServletResponse response, String jsonRequest, boolean allowModify) throws IOException, ServletException {
// Grab the JSON block out of it and convert it to RequestData objects we can use.
List<RequestInfo> batchedRequests = new ArrayList<RequestInfo>();
try {
JSONArray arr = new JSONArray(jsonRequest);
for (int i = 0; i < arr.length(); i++) {
JSONObject obj = arr.getJSONObject(i);
RequestInfo r = new RequestInfo(obj);
// Drop modifying sub-requests unless the caller explicitly allows them.
if ( allowModify || r.isSafe() ) {
batchedRequests.add(r);
}
}
} catch (MalformedURLException e) {
// Any malformed sub-request invalidates the whole batch: reply 400.
response.sendError(HttpServletResponse.SC_BAD_REQUEST,e.getMessage());
return;
} catch (URISyntaxException e) {
response.sendError(HttpServletResponse.SC_BAD_REQUEST,e.getMessage());
return;
} catch (UnsupportedEncodingException e) {
response.sendError(HttpServletResponse.SC_BAD_REQUEST,e.getMessage());
return;
} catch (JSONException e) {
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
"Failed to parse the " + REQUESTS_PARAMETER + " parameter");
LOGGER.warn("Failed to parse the " + REQUESTS_PARAMETER + " parameter");
return;
}
// Loop over the requests and handle each one.
// Results are buffered in a StringWriter so the response status/headers can
// still be changed if JSON generation fails midway.
try {
StringWriter sw = new StringWriter();
JSONWriter write = new JSONWriter(sw);
write.object();
write.key("results");
write.array();
for (RequestInfo r : batchedRequests) {
doRequest(request, response, r, write);
}
write.endArray();
write.endObject();
response.setContentType("application/json");
response.setCharacterEncoding("UTF-8");
response.getWriter().write(sw.getBuffer().toString());
} catch (JSONException e) {
LOGGER.warn("Failed to create a JSON response");
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
"Failed to write JSON response");
}
}
private void doRequest(SlingHttpServletRequest request,
SlingHttpServletResponse response, RequestInfo requestInfo,
JSONWriter write) throws JSONException, ServletException {
// Look for a matching resource in the usual way. If one is found,
// the resource will also be embedded with any necessary RequestPathInfo.
// TODO: This is a nasty hack to work around white listing of /system/batch POST
// requests. This should be removed when the UI has refactored itself not to use batch
// POSTs in place of GETs (see http spec for reasons by thats bad)
if (User.ANON_USER.equals(request.getRemoteUser())) {
if (!"GET".equals(requestInfo.getMethod())) {
response.reset();
throw new ServletException("Anon Users may only perform GET operations");
}
}
String requestPath = requestInfo.getUrl();
ResourceResolver resourceResolver = request.getResourceResolver();
Resource resource = resourceResolver.resolve(request, requestPath);
// Wrap the request and response.
RequestWrapper requestWrapper = new RequestWrapper(request, requestInfo);
ResponseWrapper responseWrapper = new ResponseWrapper(response);
RequestDispatcher requestDispatcher;
try {
// Get the response
try {
if (resource != null) {
LOGGER.debug("Dispatching to request path='{}', resource path='{}'", requestPath, resource.getPath());
requestDispatcher = request.getRequestDispatcher(resource);
} else {
LOGGER.debug("Dispatching to request path='{}', no resource", requestPath);
requestDispatcher = request.getRequestDispatcher(requestPath);
}
requestDispatcher.forward(requestWrapper, responseWrapper);
} catch (ResourceNotFoundException e) {
responseWrapper.setStatus(HttpServletResponse.SC_NOT_FOUND);
} catch (SlingException e) {
responseWrapper.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
}
// Write the response (status, headers, body) back to the client.
writeResponse(write, responseWrapper, requestInfo);
} catch (ServletException e) {
writeFailedRequest(write, requestInfo);
} catch (IOException e) {
writeFailedRequest(write, requestInfo);
}
}
private void writeResponse(JSONWriter write, ResponseWrapper responseWrapper,
RequestInfo requestData) throws JSONException {
try {
String body = responseWrapper.getDataAsString();
write.object();
write.key("url");
write.value(requestData.getUrl());
write.key("success");
write.value(true);
write.key("body");
write.value(body);
write.key("status");
write.value(responseWrapper.getResponseStatus());
write.key("headers");
write.object();
Dictionary<String, String> headers = responseWrapper.getResponseHeaders();
Enumeration<String> keys = headers.keys();
while (keys.hasMoreElements()) {
String k = keys.nextElement();
write.key(k);
write.value(headers.get(k));
}
write.endObject();
write.endObject();
} catch (UnsupportedEncodingException e) {
writeFailedRequest(write, requestData);
}
}
private void writeFailedRequest(JSONWriter write, RequestInfo requestData)
throws JSONException {
write.object();
write.key("url");
write.value(requestData.getUrl());
write.key("success");
write.value(false);
write.endObject();
}
}
| |
/**
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.demo.iotworld.photo.library;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.imageio.ImageIO;
import org.kaaproject.kaa.demo.iotworld.photo.PhotoAlbumInfo;
import org.kaaproject.kaa.demo.iotworld.photo.PhotoPlayerApplication;
import org.kaaproject.kaa.demo.iotworld.photo.PhotoUploadRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class PhotoLibrary {

    // BUG FIX: the logger was registered under PhotoPlayerApplication.class,
    // so all PhotoLibrary log lines were attributed to the wrong class.
    private static final Logger LOG = LoggerFactory.getLogger(PhotoLibrary.class);

    private static final int MAX_PHOTOS_IN_THUMBNAIL = 1;
    private static final String UPLOADS_ALBUM_NAME = "Uploads";
    public static final int THUMBNAIL_SIZE = 256;
    private static final String[] SUPPORTED_EXTENSIONS = { ".jpg", ".jpeg", ".png" };

    // Root directory of the photo library on disk.
    private final Path rootPath;
    // Album id (absolute directory path) -> album; LinkedHashMap preserves
    // scan order, which getNext() relies on.
    private final Map<String, PhotoAlbum> albums;

    public PhotoLibrary(Path rootPath) {
        super();
        this.rootPath = rootPath;
        this.albums = new LinkedHashMap<>();
    }

    /**
     * Rebuilds the album map by walking the library root, then (re)generates a
     * thumbnail for every discovered album.
     */
    public void scan() {
        LOG.info("Scanning path: {}", rootPath.toString());
        albums.clear();
        try {
            Files.walkFileTree(rootPath, new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult visitFile(Path aFile, BasicFileAttributes aAttrs) throws IOException {
                    LOG.info("Processing file: {}", aFile);
                    scanFile(aFile);
                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult preVisitDirectory(Path aDir, BasicFileAttributes aAttrs) throws IOException {
                    LOG.info("Processing directory: {}", aDir);
                    return FileVisitResult.CONTINUE;
                }
            });
            generateThumbnails(albums.values());
        } catch (IOException e) {
            // Log through SLF4J instead of printStackTrace() so the failure
            // shows up in the application log.
            LOG.error("Failed to scan media library under {}", rootPath, e);
        }
    }

    /**
     * Registers a single file in its parent-directory album.
     *
     * @return the album the photo was added to, or {@code null} if the file
     *         extension is not supported
     */
    private PhotoAlbum scanFile(Path aFile) {
        String filePath = aFile.toFile().getAbsolutePath();
        if (validate(filePath)) {
            // The album id is the absolute path of the containing directory.
            String albumId = aFile.getParent().toFile().getAbsolutePath();
            PhotoAlbum album = albums.get(albumId);
            if (album == null) {
                album = new PhotoAlbum(albumId);
                album.getInfo().setTitle(aFile.getParent().toFile().getName());
                albums.put(albumId, album);
            }
            album.addPhoto(filePath);
            return album;
        }
        return null;
    }

    /** Returns true if the file name ends with a supported image extension. */
    private boolean validate(String filePath) {
        // Lower-case once instead of once per extension.
        String lowerCasePath = filePath.toLowerCase();
        for (String ext : SUPPORTED_EXTENSIONS) {
            if (lowerCasePath.endsWith(ext)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns the id of the album following {@code albumId} in scan order,
     * wrapping around to the first album. When {@code albumId} is unknown or
     * null the first album id is returned.
     * NOTE(review): throws NoSuchElementException / IndexOutOfBoundsException
     * when the library is empty, same as the historical behavior — callers
     * appear to rely on the library being non-empty.
     */
    public String getNext(String albumId) {
        if (albumId != null && albums.containsKey(albumId)) {
            List<String> keys = new ArrayList<String>(albums.keySet());
            int pos = keys.indexOf(albumId) + 1;
            // Wrap around after the last album.
            return keys.get(pos < keys.size() ? pos : 0);
        } else {
            return albums.keySet().iterator().next();
        }
    }

    /** Returns the number of known albums. */
    public int getSize() {
        return albums.size();
    }

    /** Returns a snapshot list of all albums (callers may mutate the list). */
    public List<PhotoAlbum> getAlbums() {
        return new ArrayList<PhotoAlbum>(albums.values());
    }

    /** Builds the list of album descriptors used for remote reporting. */
    public List<PhotoAlbumInfo> buildAlbumInfoList() {
        List<PhotoAlbumInfo> infos = new ArrayList<PhotoAlbumInfo>();
        for (PhotoAlbum album : getAlbums()) {
            infos.add(album.getInfo());
        }
        return infos;
    }

    /**
     * Persists an uploaded photo into the "Uploads" album (overwriting any
     * photo with the same name) and registers it in the library.
     *
     * @return the id of the album the photo landed in
     * @throws IllegalArgumentException if the uploaded file name has an
     *         unsupported extension (previously this caused an NPE)
     * @throws RuntimeException if the file cannot be written
     */
    public String upload(PhotoUploadRequest event) {
        String uploadAlbumPath = getUploadsAlbumId();
        File uploadAlbumFile = new File(uploadAlbumPath);
        if (!uploadAlbumFile.exists()) {
            LOG.info("Creating upload album");
            uploadAlbumFile.mkdirs();
        }
        File photoFile = new File(uploadAlbumFile, event.getName());
        if (photoFile.exists()) {
            LOG.info("Photo with such name already exists. Will overwrite it!");
            photoFile.delete();
        }
        try (FileOutputStream os = new FileOutputStream(photoFile)) {
            os.write(toByteArray(event.getBody()));
        } catch (IOException e) {
            LOG.error("Failed to write new file", e);
            throw new RuntimeException(e);
        }
        PhotoAlbum album = scanFile(Paths.get(photoFile.toURI()));
        if (album == null) {
            // scanFile() rejects unsupported extensions; the original code
            // dereferenced the null result and threw an opaque NPE here.
            throw new IllegalArgumentException("Unsupported photo type: " + event.getName());
        }
        // Only (re)build the thumbnail while the album is small; afterwards
        // the existing thumbnail is kept.
        if (album.getPhotos().size() <= MAX_PHOTOS_IN_THUMBNAIL) {
            generateThumbnail(album);
        }
        return album.getAlbumId();
    }

    /** Returns the id (directory path) of the uploads album. */
    public String getUploadsAlbumId() {
        return rootPath + File.separator + UPLOADS_ALBUM_NAME;
    }

    private void generateThumbnails(Collection<PhotoAlbum> albums) {
        for (PhotoAlbum album : albums) {
            generateThumbnail(album);
        }
    }

    /**
     * Generates a thumbnail from the first photo of the album. Failures are
     * logged but not propagated so a single bad image cannot break a scan.
     */
    private void generateThumbnail(PhotoAlbum album) {
        if (album.getPhotos().isEmpty()) {
            throw new IllegalArgumentException("Can't generate thumbnail for empty album!");
        }
        String photo = album.getPhotos().get(0);
        try {
            long time = System.currentTimeMillis();
            album.getInfo().setThumbnail(toThumbnailData(photo));
            LOG.info("Thumbnail generation time = {}", System.currentTimeMillis() - time);
        } catch (IOException e) {
            // BUG FIX: corrected "Faield" typo in the log message.
            LOG.error("Failed to create thumbnail for {}", photo, e);
        }
    }

    private ByteBuffer toThumbnailData(String photo) throws IOException {
        BufferedImage image = ImageIO.read(new File(photo));
        if (image == null) {
            // ImageIO.read returns null for unreadable/unsupported content;
            // surface that as the IOException generateThumbnail already handles
            // instead of an NPE.
            throw new IOException("Unsupported or corrupt image file: " + photo);
        }
        return toThumbnailData(image);
    }

    /**
     * Scales the image so its shorter side equals {@link #THUMBNAIL_SIZE} and
     * returns the result encoded as JPEG bytes.
     */
    public static ByteBuffer toThumbnailData(BufferedImage image) throws IOException {
        float size = Math.min(image.getWidth(), image.getHeight());
        float scale = size / THUMBNAIL_SIZE;
        int thumbnailWidth = (int) (image.getWidth() / scale);
        int thumbnailHeight = (int) (image.getHeight() / scale);
        // BUG FIX: the third constructor argument must be a BufferedImage
        // image-type constant. The original passed Image.SCALE_SMOOTH (a
        // scaling hint, value 4) which only worked by accident because it
        // happens to equal BufferedImage.TYPE_INT_BGR.
        BufferedImage bufferedThumbnail =
                new BufferedImage(thumbnailWidth, thumbnailHeight, BufferedImage.TYPE_INT_RGB);
        Graphics2D g2 = bufferedThumbnail.createGraphics();
        g2.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BICUBIC);
        g2.drawImage(image, 0, 0, thumbnailWidth, thumbnailHeight, null);
        g2.dispose();
        ByteArrayOutputStream byteArrayOut = new ByteArrayOutputStream();
        ImageIO.write(bufferedThumbnail, "jpeg", byteArrayOut);
        return ByteBuffer.wrap(byteArrayOut.toByteArray());
    }

    /** Drains the remaining bytes of the buffer into a fresh array. */
    private static byte[] toByteArray(ByteBuffer bb) {
        byte[] data = new byte[bb.remaining()];
        bb.get(data);
        return data;
    }

    public PhotoAlbum getAlbum(String albumId) {
        return albums.get(albumId);
    }

    /**
     * Deletes the uploads album directory (non-recursively, matching the
     * original behavior) and drops it from the album map.
     */
    public void deleteUploadsAlbum() {
        String uploadDirPath = getUploadsAlbumId();
        File uploadDir = new File(uploadDirPath);
        File[] children = uploadDir.listFiles();
        // listFiles() returns null when the directory does not exist or an
        // I/O error occurs; the original code NPE'd in that case.
        if (children != null) {
            for (File child : children) {
                if (!child.delete()) {
                    LOG.warn("Failed to delete {} {}", child.getAbsolutePath(), child.isDirectory() ? "dir" : "file");
                }
            }
        }
        if (!uploadDir.delete()) {
            LOG.warn("Failed to delete {}", uploadDir.getAbsolutePath());
        }
        albums.remove(uploadDirPath);
    }
}
| |
/*
* Copyright (c) 1997, 2003, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.io;
import sun.nio.cs.ext.IBM970;
/**
* @author Malcolm Ayres
*/
public class CharToByteCp970 extends CharToByteConverter
{
    // Hangul syllable composition constants (Unicode conjoining-jamo
    // algorithm): SBase is the first precomposed syllable, LBase/VBase/TBase
    // are the first leading-consonant, vowel and trailing-consonant jamo, and
    // VCount/TCount are the vowel and trailing-consonant alphabet sizes.
    private static final char SBase = '\uAC00';
    private static final char LBase = '\u1100';
    private static final char VBase = '\u1161';
    private static final char TBase = '\u11A7';
    private static final int VCount = 21;
    private static final int TCount = 28;
    // Composition state machine: G0 = nothing pending, G1 = leading consonant
    // seen, G2 = vowel seen, G3 = trailing consonant seen.
    private static final byte G0 = 0;
    private static final byte G1 = 1;
    private static final byte G2 = 2;
    private static final byte G3 = 3;
    private byte charState = G0;      // current Hangul composition state
    private char l, v, t;             // pending leading/vowel/trailing jamo
    private byte[] outputByte;        // scratch buffer for one encoded character
    private char highHalfZoneCode;    // high surrogate pending between convert() calls
    // Parameters of the two-level encoder lookup: index1 maps the high bits of
    // the char (masked by mask1, shifted by shift) to a base offset; the low
    // bits (mask2) select within index2/index2a.
    private int mask1;
    private int mask2;
    private int shift;
    private short[] index1;
    private String index2;
    private String index2a;
    // Shared NIO coder that owns the actual IBM970 mapping tables.
    private final static IBM970 nioCoder = new IBM970();

    public CharToByteCp970() {
        super();
        highHalfZoneCode = 0;
        outputByte = new byte[2];
        mask1 = 0xFFF8;
        mask2 = 0x0007;
        shift = 3;
        index1 = nioCoder.getEncoderIndex1();
        index2 = nioCoder.getEncoderIndex2();
        index2a = nioCoder.getEncoderIndex2a();
    }
    /**
     * flush out any residual data and reset the buffer state
     */
    public int flush(byte[] output, int outStart, int outEnd)
        throws MalformedInputException,
               ConversionBufferFullException
    {
        int bytesOut;
        byteOff = outStart;
        // A dangling high surrogate with no following low surrogate is
        // malformed input.
        if (highHalfZoneCode != 0) {
            reset();
            badInputLength = 0;
            throw new MalformedInputException();
        }
        // Emit any Hangul syllable still being composed.
        if (charState != G0) {
            try {
                unicodeToBuffer(composeHangul() ,output, outEnd);
            }
            catch(UnknownCharacterException e) {
                reset();
                badInputLength = 0;
                throw new MalformedInputException();
            }
            charState = G0;
        }
        bytesOut = byteOff - outStart;
        reset();
        return bytesOut;
    }
    /**
     * Resets converter to its initial state.
     */
    public void reset() {
        highHalfZoneCode = 0;
        charState = G0;
        charOff = byteOff = 0;
    }
    /**
     * Returns true if the given character can be converted to the
     * target character encoding.
     */
    public boolean canConvert(char ch) {
        int index;
        int theBytes;
        // Two-level table lookup; entries >= 15000 live in the overflow
        // table index2a.
        index = index1[((ch & mask1) >> shift)] + (ch & mask2);
        if (index < 15000)
            theBytes = (int)(index2.charAt(index));
        else
            theBytes = (int)(index2a.charAt(index-15000));
        if (theBytes != 0)
            return (true);
        // only return true if input char was unicode null - all others are
        // undefined
        return( ch == '\u0000');
    }
    /**
     * Character conversion
     */
    public int convert(char[] input, int inOff, int inEnd,
                       byte[] output, int outOff, int outEnd)
        throws UnknownCharacterException, MalformedInputException,
               ConversionBufferFullException
    {
        char inputChar;
        int inputSize;
        charOff = inOff;
        byteOff = outOff;
        while (charOff < inEnd) {
            // Resume a surrogate pair left over from the previous call, if any.
            if (highHalfZoneCode == 0) {
                inputChar = input[charOff];
                inputSize = 1;
            } else {
                inputChar = highHalfZoneCode;
                inputSize = 0;
                highHalfZoneCode = 0;
            }
            // Hangul composition state machine: combine conjoining jamo
            // sequences (L, V, T) into a single precomposed syllable before
            // encoding.
            switch (charState) {
            case G0:
                l = LBase;
                v = VBase;
                t = TBase;
                if ( isLeadingC(inputChar) ) {     // Leading Consonant
                    l = inputChar;
                    charState = G1;
                    break;
                }
                if ( isVowel(inputChar) ) {        // Vowel
                    v = inputChar;
                    charState = G2;
                    break;
                }
                if ( isTrailingC(inputChar) ) {    // Trailing Consonant
                    t = inputChar;
                    charState = G3;
                    break;
                }
                break;
            case G1:
                if ( isLeadingC(inputChar) ) {     // Leading Consonant
                    l = composeLL(l, inputChar);
                    break;
                }
                if ( isVowel(inputChar) ) {        // Vowel
                    v = inputChar;
                    charState = G2;
                    break;
                }
                if ( isTrailingC(inputChar) ) {    // Trailing Consonant
                    t = inputChar;
                    charState = G3;
                    break;
                }
                // Non-jamo ends the sequence: flush the composed syllable.
                unicodeToBuffer(composeHangul(), output, outEnd);
                charState = G0;
                break;
            case G2:
                if ( isLeadingC(inputChar) ) {     // Leading Consonant
                    // A new leading consonant starts a fresh syllable.
                    unicodeToBuffer(composeHangul(), output, outEnd);
                    l = inputChar;
                    v = VBase;
                    t = TBase;
                    charState = G1;
                    break;
                }
                if ( isVowel(inputChar) ) {        // Vowel
                    v = composeVV(l, inputChar);
                    charState = G2;
                    break;
                }
                if ( isTrailingC(inputChar) ) {    // Trailing Consonant
                    t = inputChar;
                    charState = G3;
                    break;
                }
                unicodeToBuffer(composeHangul(), output, outEnd);
                charState = G0;
                break;
            case G3:
                if ( isTrailingC(inputChar) ) {    // Trailing Consonant
                    t = composeTT(t, inputChar);
                    charState = G3;
                    break;
                }
                unicodeToBuffer(composeHangul(), output, outEnd);
                charState = G0;
                break;
            }
            if (charState != G0)
                // The char was consumed by the composition machine.
                charOff++;
            else {
                // Is this a high surrogate?
                if(inputChar >= '\ud800' && inputChar <= '\udbff') {
                    // Is this the last character of the input?
                    if (charOff + inputSize >= inEnd) {
                        // Remember it; the low surrogate may arrive in the
                        // next convert() call.
                        highHalfZoneCode = inputChar;
                        charOff += inputSize;
                        break;
                    }
                    // Is there a low surrogate following?
                    inputChar = input[charOff + inputSize];
                    if (inputChar >= '\udc00' && inputChar <= '\udfff') {
                        // We have a valid surrogate pair. Too bad we don't do
                        // surrogates. Is substitution enabled?
                        if (subMode) {
                            if (subBytes.length == 1) {
                                outputByte[0] = 0x00;
                                outputByte[1] = subBytes[0];
                            } else {
                                outputByte[0] = subBytes[0];
                                outputByte[1] = subBytes[1];
                            }
                            bytesToBuffer(outputByte, output, outEnd);
                            inputSize++;
                        } else {
                            badInputLength = 2;
                            throw new UnknownCharacterException();
                        }
                    } else {
                        // We have a malformed surrogate pair
                        badInputLength = 1;
                        throw new MalformedInputException();
                    }
                }
                // Is this an unaccompanied low surrogate?
                else
                    if (inputChar >= '\uDC00' && inputChar <= '\uDFFF') {
                        badInputLength = 1;
                        throw new MalformedInputException();
                    } else {
                        // Ordinary BMP character: encode it directly.
                        unicodeToBuffer(inputChar, output, outEnd);
                    }
                charOff += inputSize;
            }
        }
        // Number of bytes written by this call.
        return byteOff - outOff;
    }
    // Combines the pending L/V/T jamo into one precomposed Hangul syllable
    // using the standard arithmetic composition formula.
    private char composeHangul() {
        int lIndex, vIndex, tIndex;
        lIndex = l - LBase;
        vIndex = v - VBase;
        tIndex = t - TBase;
        return (char)((lIndex * VCount + vIndex) * TCount + tIndex + SBase);
    }
    // L+L, V+V and T+T do not actually compose here: the newest jamo simply
    // replaces the pending one.
    private char composeLL(char l1, char l2) {
        return l2;
    }
    private char composeVV(char v1, char v2) {
        return v2;
    }
    private char composeTT(char t1, char t2) {
        return t2;
    }
    // Range checks for the three conjoining-jamo classes.
    private boolean isLeadingC(char c) {
        return (c >= LBase && c <= '\u1159');
    }
    private boolean isVowel(char c) {
        return (c >= VBase && c <= '\u11a2');
    }
    private boolean isTrailingC(char c) {
        return (c >= TBase && c <= '\u11f9');
    }
    /**
     * returns the maximum number of bytes needed to convert a char
     */
    public int getMaxBytesPerChar() {
        return 2;
    }
    /**
     * Return the character set ID
     */
    public String getCharacterEncoding() {
        return "Cp970";
    }
    /**
     * private function to add the bytes to the output buffer
     */
    private void bytesToBuffer(byte[] theBytes, byte[] output, int outEnd)
        throws ConversionBufferFullException,
               UnknownCharacterException {
        int spaceNeeded;
        // ensure sufficient space for the bytes(s): a zero lead byte means a
        // single-byte encoding.
        if (theBytes[0] == 0x00)
            spaceNeeded = 1;
        else
            spaceNeeded = 2;
        if (byteOff + spaceNeeded > outEnd)
            throw new ConversionBufferFullException();
        // move the data into the buffer
        if (spaceNeeded == 1)
            output[byteOff++] = theBytes[1];
        else {
            output[byteOff++] = theBytes[0];
            output[byteOff++] = theBytes[1];
        }
    }
    /**
     * private function to add a unicode character to the output buffer
     */
    private void unicodeToBuffer(char unicode, byte[] output, int outEnd)
        throws ConversionBufferFullException,
               UnknownCharacterException {
        int index;
        int theBytes;
        // first we convert the unicode to its byte representation
        index = index1[((unicode & mask1) >> shift)] + (unicode & mask2);
        if (index < 15000) {
            theBytes = (int)(index2.charAt(index));
        } else {
            theBytes = (int)(index2a.charAt(index-15000));
        }
        outputByte[0] = (byte)((theBytes & 0x0000ff00)>>8);
        outputByte[1] = (byte)(theBytes & 0x000000ff);
        // if the unicode was not mappable - look for the substitution bytes
        if (outputByte[0] == 0x00 && outputByte[1] == 0x00
            && unicode != '\u0000') {
            if (subMode) {
                if (subBytes.length == 1) {
                    outputByte[0] = 0x00;
                    outputByte[1] = subBytes[0];
                } else {
                    outputByte[0] = subBytes[0];
                    outputByte[1] = subBytes[1];
                }
            } else {
                badInputLength = 1;
                throw new UnknownCharacterException();
            }
        }
        // now put the bytes in the buffer
        bytesToBuffer(outputByte, output, outEnd);
    }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.metadata;
import com.google.common.base.Predicate;
import com.google.common.base.Supplier;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.metamx.common.ISE;
import com.metamx.common.RetryUtils;
import com.metamx.common.logger.Logger;
import org.apache.commons.dbcp2.BasicDataSource;
import org.skife.jdbi.v2.Batch;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.TransactionCallback;
import org.skife.jdbi.v2.TransactionStatus;
import org.skife.jdbi.v2.exceptions.DBIException;
import org.skife.jdbi.v2.exceptions.UnableToExecuteStatementException;
import org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException;
import org.skife.jdbi.v2.tweak.HandleCallback;
import org.skife.jdbi.v2.util.ByteArrayMapper;
import org.skife.jdbi.v2.util.IntegerMapper;
import java.sql.SQLException;
import java.sql.SQLRecoverableException;
import java.sql.SQLTransientException;
import java.util.List;
import java.util.concurrent.Callable;
public abstract class SQLMetadataConnector implements MetadataStorageConnector
{
private static final Logger log = new Logger(SQLMetadataConnector.class);
private static final String PAYLOAD_TYPE = "BLOB";
private final Supplier<MetadataStorageConnectorConfig> config;
private final Supplier<MetadataStorageTablesConfig> tablesConfigSupplier;
private final Predicate<Throwable> shouldRetry;
public SQLMetadataConnector(
Supplier<MetadataStorageConnectorConfig> config,
Supplier<MetadataStorageTablesConfig> tablesConfigSupplier
)
{
this.config = config;
this.tablesConfigSupplier = tablesConfigSupplier;
this.shouldRetry = new Predicate<Throwable>()
{
@Override
public boolean apply(Throwable e)
{
return isTransientException(e);
}
};
}
/**
* SQL type to use for payload data (e.g. JSON blobs).
* Must be a binary type, which values can be accessed using ResultSet.getBytes()
* <p/>
* The resulting string will be interpolated into the table creation statement, e.g.
* <code>CREATE TABLE druid_table ( payload <type> NOT NULL, ... )</code>
*
* @return String representing the SQL type
*/
protected String getPayloadType()
{
return PAYLOAD_TYPE;
}
/**
* Auto-incrementing SQL type to use for IDs
* Must be an integer type, which values will be automatically set by the database
* <p/>
* The resulting string will be interpolated into the table creation statement, e.g.
* <code>CREATE TABLE druid_table ( id <type> NOT NULL, ... )</code>
*
* @return String representing the SQL type and auto-increment statement
*/
protected abstract String getSerialType();
public String getValidationQuery() { return "SELECT 1"; }
public abstract boolean tableExists(Handle handle, final String tableName);
public <T> T retryWithHandle(final HandleCallback<T> callback)
{
final Callable<T> call = new Callable<T>()
{
@Override
public T call() throws Exception
{
return getDBI().withHandle(callback);
}
};
final int maxTries = 10;
try {
return RetryUtils.retry(call, shouldRetry, maxTries);
}
catch (Exception e) {
throw Throwables.propagate(e);
}
}
public <T> T retryTransaction(final TransactionCallback<T> callback)
{
final Callable<T> call = new Callable<T>()
{
@Override
public T call() throws Exception
{
return getDBI().inTransaction(callback);
}
};
final int maxTries = 10;
try {
return RetryUtils.retry(call, shouldRetry, maxTries);
}
catch (Exception e) {
throw Throwables.propagate(e);
}
}
public final boolean isTransientException(Throwable e)
{
return e != null && (e instanceof SQLTransientException
|| e instanceof SQLRecoverableException
|| e instanceof UnableToObtainConnectionException
|| e instanceof UnableToExecuteStatementException
|| connectorIsTransientException(e)
|| (e instanceof SQLException && isTransientException(e.getCause()))
|| (e instanceof DBIException && isTransientException(e.getCause())));
}
protected boolean connectorIsTransientException(Throwable e)
{
return false;
}
public void createTable(final String tableName, final Iterable<String> sql)
{
try {
retryWithHandle(
new HandleCallback<Void>()
{
@Override
public Void withHandle(Handle handle) throws Exception
{
if (!tableExists(handle, tableName)) {
log.info("Creating table[%s]", tableName);
final Batch batch = handle.createBatch();
for (String s : sql) {
batch.add(s);
}
batch.execute();
} else {
log.info("Table[%s] already exists", tableName);
}
return null;
}
}
);
}
catch (Exception e) {
log.warn(e, "Exception creating table");
}
}
public void createPendingSegmentsTable(final String tableName)
{
createTable(
tableName,
ImmutableList.of(
String.format(
"CREATE TABLE %1$s (\n"
+ " id VARCHAR(255) NOT NULL,\n"
+ " dataSource VARCHAR(255) NOT NULL,\n"
+ " created_date VARCHAR(255) NOT NULL,\n"
+ " start VARCHAR(255) NOT NULL,\n"
+ " \"end\" VARCHAR(255) NOT NULL,\n"
+ " sequence_name VARCHAR(255) NOT NULL,\n"
+ " sequence_prev_id VARCHAR(255) NOT NULL,\n"
+ " payload %2$s NOT NULL,\n"
+ " PRIMARY KEY (id),\n"
+ " UNIQUE (sequence_name, sequence_prev_id)\n"
+ ")",
tableName, getPayloadType()
)
)
);
}
public void createSegmentTable(final String tableName)
{
createTable(
tableName,
ImmutableList.of(
String.format(
"CREATE TABLE %1$s (\n"
+ " id VARCHAR(255) NOT NULL,\n"
+ " dataSource VARCHAR(255) NOT NULL,\n"
+ " created_date VARCHAR(255) NOT NULL,\n"
+ " start VARCHAR(255) NOT NULL,\n"
+ " \"end\" VARCHAR(255) NOT NULL,\n"
+ " partitioned BOOLEAN NOT NULL,\n"
+ " version VARCHAR(255) NOT NULL,\n"
+ " used BOOLEAN NOT NULL,\n"
+ " payload %2$s NOT NULL,\n"
+ " PRIMARY KEY (id)\n"
+ ")",
tableName, getPayloadType()
),
String.format("CREATE INDEX idx_%1$s_datasource ON %1$s(dataSource)", tableName),
String.format("CREATE INDEX idx_%1$s_used ON %1$s(used)", tableName)
)
);
}
public void createRulesTable(final String tableName)
{
createTable(
tableName,
ImmutableList.of(
String.format(
"CREATE TABLE %1$s (\n"
+ " id VARCHAR(255) NOT NULL,\n"
+ " dataSource VARCHAR(255) NOT NULL,\n"
+ " version VARCHAR(255) NOT NULL,\n"
+ " payload %2$s NOT NULL,\n"
+ " PRIMARY KEY (id)\n"
+ ")",
tableName, getPayloadType()
),
String.format("CREATE INDEX idx_%1$s_datasource ON %1$s(dataSource)", tableName)
)
);
}
public void createConfigTable(final String tableName)
{
createTable(
tableName,
ImmutableList.of(
String.format(
"CREATE TABLE %1$s (\n"
+ " name VARCHAR(255) NOT NULL,\n"
+ " payload %2$s NOT NULL,\n"
+ " PRIMARY KEY(name)\n"
+ ")",
tableName, getPayloadType()
)
)
);
}
public void createEntryTable(final String tableName)
{
createTable(
tableName,
ImmutableList.of(
String.format(
"CREATE TABLE %1$s (\n"
+ " id VARCHAR(255) NOT NULL,\n"
+ " created_date VARCHAR(255) NOT NULL,\n"
+ " datasource VARCHAR(255) NOT NULL,\n"
+ " payload %2$s NOT NULL,\n"
+ " status_payload %2$s NOT NULL,\n"
+ " active BOOLEAN NOT NULL DEFAULT FALSE,\n"
+ " PRIMARY KEY (id)\n"
+ ")",
tableName, getPayloadType()
),
String.format("CREATE INDEX idx_%1$s_active_created_date ON %1$s(active, created_date)", tableName)
)
);
}
public void createLogTable(final String tableName, final String entryTypeName)
{
createTable(
tableName,
ImmutableList.of(
String.format(
"CREATE TABLE %1$s (\n"
+ " id %2$s NOT NULL,\n"
+ " %4$s_id VARCHAR(255) DEFAULT NULL,\n"
+ " log_payload %3$s,\n"
+ " PRIMARY KEY (id)\n"
+ ")",
tableName, getSerialType(), getPayloadType(), entryTypeName
),
String.format("CREATE INDEX idx_%1$s_%2$s_id ON %1$s(%2$s_id)", tableName, entryTypeName)
)
);
}
public void createLockTable(final String tableName, final String entryTypeName)
{
createTable(
tableName,
ImmutableList.of(
String.format(
"CREATE TABLE %1$s (\n"
+ " id %2$s NOT NULL,\n"
+ " %4$s_id VARCHAR(255) DEFAULT NULL,\n"
+ " lock_payload %3$s,\n"
+ " PRIMARY KEY (id)\n"
+ ")",
tableName, getSerialType(), getPayloadType(), entryTypeName
),
String.format("CREATE INDEX idx_%1$s_%2$s_id ON %1$s(%2$s_id)", tableName, entryTypeName)
)
);
}
@Override
public Void insertOrUpdate(
final String tableName,
final String keyColumn,
final String valueColumn,
final String key,
final byte[] value
) throws Exception
{
return getDBI().inTransaction(
new TransactionCallback<Void>()
{
@Override
public Void inTransaction(Handle handle, TransactionStatus transactionStatus) throws Exception
{
int count = handle
.createQuery(
String.format("SELECT COUNT(*) FROM %1$s WHERE %2$s = :key", tableName, keyColumn)
)
.bind("key", key)
.map(IntegerMapper.FIRST)
.first();
if (count == 0) {
handle.createStatement(
String.format(
"INSERT INTO %1$s (%2$s, %3$s) VALUES (:key, :value)",
tableName, keyColumn, valueColumn
)
)
.bind("key", key)
.bind("value", value)
.execute();
} else {
handle.createStatement(
String.format(
"UPDATE %1$s SET %3$s=:value WHERE %2$s=:key",
tableName, keyColumn, valueColumn
)
)
.bind("key", key)
.bind("value", value)
.execute();
}
return null;
}
}
);
}
public abstract DBI getDBI();
@Override
public void createPendingSegmentsTable()
{
if (config.get().isCreateTables()) {
createPendingSegmentsTable(tablesConfigSupplier.get().getPendingSegmentsTable());
}
}
@Override
public void createSegmentTable()
{
if (config.get().isCreateTables()) {
createSegmentTable(tablesConfigSupplier.get().getSegmentsTable());
}
}
@Override
public void createRulesTable() {
if (config.get().isCreateTables()) {
createRulesTable(tablesConfigSupplier.get().getRulesTable());
}
}
@Override
public void createConfigTable() {
if (config.get().isCreateTables()) {
createConfigTable(tablesConfigSupplier.get().getConfigTable());
}
}
@Override
public void createTaskTables() {
if (config.get().isCreateTables()) {
final MetadataStorageTablesConfig tablesConfig = tablesConfigSupplier.get();
final String entryType = tablesConfig.getTaskEntryType();
createEntryTable(tablesConfig.getEntryTable(entryType));
createLogTable(tablesConfig.getLogTable(entryType), entryType);
createLockTable(tablesConfig.getLockTable(entryType), entryType);
}
}
@Override
public byte[] lookup(
final String tableName,
final String keyColumn,
final String valueColumn,
final String key
)
{
final String selectStatement = String.format("SELECT %s FROM %s WHERE %s = :key", valueColumn,
tableName, keyColumn);
return getDBI().withHandle(
new HandleCallback<byte[]>()
{
@Override
public byte[] withHandle(Handle handle) throws Exception
{
List<byte[]> matched = handle.createQuery(selectStatement)
.bind("key", key)
.map(ByteArrayMapper.FIRST)
.list();
if (matched.isEmpty()) {
return null;
}
if (matched.size() > 1) {
throw new ISE("Error! More than one matching entry[%d] found for [%s]?!", matched.size(), key);
}
return matched.get(0);
}
}
);
}
public MetadataStorageConnectorConfig getConfig() { return config.get(); }
protected BasicDataSource getDatasource()
{
MetadataStorageConnectorConfig connectorConfig = getConfig();
BasicDataSource dataSource = new BasicDataSource();
dataSource.setUsername(connectorConfig.getUser());
dataSource.setPassword(connectorConfig.getPassword());
String uri = connectorConfig.getConnectURI();
dataSource.setUrl(uri);
dataSource.setValidationQuery(getValidationQuery());
dataSource.setTestOnBorrow(true);
return dataSource;
}
private void createAuditTable(final String tableName)
{
createTable(
tableName,
ImmutableList.of(
String.format(
"CREATE TABLE %1$s (\n"
+ " id %2$s NOT NULL,\n"
+ " audit_key VARCHAR(255) NOT NULL,\n"
+ " type VARCHAR(255) NOT NULL,\n"
+ " author VARCHAR(255) NOT NULL,\n"
+ " comment VARCHAR(2048) NOT NULL,\n"
+ " created_date VARCHAR(255) NOT NULL,\n"
+ " payload %3$s NOT NULL,\n"
+ " PRIMARY KEY(id)\n"
+ ")",
tableName, getSerialType(), getPayloadType()
),
String.format("CREATE INDEX idx_%1$s_key_time ON %1$s(audit_key, created_date)", tableName),
String.format("CREATE INDEX idx_%1$s_type_time ON %1$s(type, created_date)", tableName),
String.format("CREATE INDEX idx_%1$s_audit_time ON %1$s(created_date)", tableName)
)
);
}
@Override
public void createAuditTable()
{
  // Respect the "create tables" toggle; the table name comes from the tables config.
  if (!config.get().isCreateTables()) {
    return;
  }
  createAuditTable(tablesConfigSupplier.get().getAuditTable());
}
}
| |
/******************************************************************************
* Confidential Proprietary *
* (c) Copyright Haifeng Li 2011, All Rights Reserved *
******************************************************************************/
package smile.plot;
import java.awt.Color;
import smile.math.Math;
import smile.sort.QuickSort;
/**
* A surface object gives 3D information e.g. a contour plots.
*
* @author Haifeng Li
*/
public class Surface extends Plot {

    /**
     * The vertex coordinates of the surface. data[i][j] holds the
     * (x, y, z) coordinates of grid node (i, j).
     */
    private double[][][] data;
    /**
     * Vertex Z-axis value in camera coordinate.
     */
    private double[][] zc;
    /**
     * Per-triangle depth used by the painter's algorithm. Note this is the
     * sum of the three vertex depths, which sorts the same as the average.
     */
    private double[] az;
    /**
     * Triangle indices after sorting by depth (see paint()).
     */
    private int[] order;
    /**
     * Triangles. Each row is the index of triangle vertices in data:
     * (i0, j0, i1, j1, i2, j2).
     */
    private int[][] triangles;
    /**
     * The minimum of the data.
     */
    private double min;
    /**
     * The maximum of the data.
     */
    private double max;
    /**
     * The window width of values for each color.
     */
    private double width = 1.0;
    /**
     * The color palette to represent values (null = wireframe only).
     */
    private Color[] palette;

    /**
     * Constructor.
     * @param z the z-axis values of surface. The x-axis and y-axis location of
     * surface will be set to 0.5, 1.5, 2.5, ...
     */
    public Surface(double[][] z) {
        super(Color.GRAY);
        init(z);
    }

    /**
     * Constructor.
     * @param z the z-axis values of surface. The x-axis and y-axis location of
     * surface will be set to 0.5, 1.5, 2.5, ...
     * @param palette the color palette.
     */
    public Surface(double[][] z, Color[] palette) {
        // FIX: draw the wireframe in gray like the non-palette constructor;
        // previously this constructor fell through to the default Plot color.
        super(Color.GRAY);
        this.palette = palette;
        init(z);
    }

    /**
     * Constructor for regular mesh grid.
     * @param x the x-axis values of surface.
     * @param y the y-axis values of surface.
     * @param z the z-axis values of surface.
     */
    public Surface(double[] x, double[] y, double[][] z) {
        super(Color.GRAY);
        init(x, y, z);
    }

    /**
     * Constructor for regular mesh grid.
     * @param x the x-axis values of surface.
     * @param y the y-axis values of surface.
     * @param z the z-axis values of surface.
     * @param palette the color palette.
     */
    public Surface(double[] x, double[] y, double[][] z, Color[] palette) {
        // FIX: consistent gray wireframe (see two-arg constructor).
        super(Color.GRAY);
        this.palette = palette;
        init(x, y, z);
    }

    /**
     * Constructor for irregular mesh grid.
     * @param data an m x n x 3 array which are coordinates of m x n surface.
     */
    public Surface(double[][][] data) {
        super(Color.GRAY);
        init(data);
    }

    /**
     * Constructor for irregular mesh grid.
     * @param data an m x n x 3 array which are coordinates of m x n surface.
     * @param palette the color palette.
     */
    public Surface(double[][][] data, Color[] palette) {
        // FIX: consistent gray wireframe (see two-arg constructor).
        super(Color.GRAY);
        this.palette = palette;
        init(data);
    }

    /**
     * Initialization for a z-only grid: generates x = 0.5, 1.5, ... and
     * y = 0.5, 1.5, ... and delegates to the (x, y, z) initializer.
     */
    private void init(double[][] z) {
        int m = z.length;
        int n = z[0].length;
        double[] x = new double[m];
        double[] y = new double[n];
        for (int i = 0; i < m; i++) {
            x[i] = i + 0.5;
        }
        for (int j = 0; j < n; j++) {
            y[j] = j + 0.5;
        }
        init(x, y, z);
    }

    /**
     * Initialization for a regular mesh grid.
     */
    private void init(double[] x, double[] y, double[][] z) {
        max = Math.max(z);
        min = Math.min(z);
        if (palette != null) {
            width = (max - min) / palette.length;
        }

        int m = z.length;
        int n = z[0].length;
        allocate(m, n);
        data = new double[m][n][3];

        for (int i = 0, k = 0; i < m; i++) {
            for (int j = 0; j < n; j++, k += 2) {
                data[i][j][0] = x[i];
                data[i][j][1] = y[j];
                data[i][j][2] = z[i][j];
                if (i < m - 1 && j < n - 1) {
                    setCellTriangles(i, j, k);
                }
            }
        }
    }

    /**
     * Initialization for an irregular mesh grid (explicit coordinates).
     */
    private void init(double[][][] data) {
        this.data = data;
        int m = data.length;
        int n = data[0].length;
        allocate(m, n);

        min = Double.POSITIVE_INFINITY;
        max = Double.NEGATIVE_INFINITY;
        for (int i = 0, k = 0; i < m; i++) {
            for (int j = 0; j < n; j++, k += 2) {
                double z = data[i][j][2];
                if (z < min) {
                    min = z;
                }
                if (z > max) {
                    max = z;
                }
                if (i < m - 1 && j < n - 1) {
                    setCellTriangles(i, j, k);
                }
            }
        }

        if (palette != null) {
            width = (max - min) / palette.length;
        }
    }

    /**
     * Allocates the triangle and depth-sorting buffers for an m x n grid.
     * Note the arrays are sized 2*m*n even though only the interior cells
     * produce triangles; unused rows stay all-zero (as in the original code).
     */
    private void allocate(int m, int n) {
        triangles = new int[2 * m * n][6];
        az = new double[2 * m * n];
        order = new int[az.length];
        zc = new double[m][n];
    }

    /**
     * Splits grid cell (i, j) into two triangles stored at rows k and k+1.
     */
    private void setCellTriangles(int i, int j, int k) {
        triangles[k][0] = i;
        triangles[k][1] = j;
        triangles[k][2] = i + 1;
        triangles[k][3] = j;
        triangles[k][4] = i;
        triangles[k][5] = j + 1;
        triangles[k + 1][0] = i + 1;
        triangles[k + 1][1] = j + 1;
        triangles[k + 1][2] = i + 1;
        triangles[k + 1][3] = j;
        triangles[k + 1][4] = i;
        triangles[k + 1][5] = j + 1;
    }

    @Override
    public void paint(Graphics g) {
        Color c = g.getColor();
        g.setColor(getColor());

        // Wireframe: mesh lines along both grid directions.
        for (int i = 0; i < data.length; i++) {
            for (int j = 0; j < data[i].length - 1; j++) {
                g.drawLine(data[i][j], data[i][j + 1]);
            }
        }
        for (int i = 0; i < data.length - 1; i++) {
            for (int j = 0; j < data[i].length; j++) {
                g.drawLine(data[i][j], data[i + 1][j]);
            }
        }

        if (palette != null) {
            int m = data.length;
            int n = data[0].length;
            Projection3D p3d = (Projection3D) g.projection;

            // Vertex depth in camera coordinates.
            for (int i = 0; i < m; i++) {
                for (int j = 0; j < n; j++) {
                    zc[i][j] = p3d.z(data[i][j]);
                }
            }

            // Per-triangle depth (sum of the three vertex depths — sorts the
            // same as the average).
            for (int i = 0; i < triangles.length; i++) {
                az[i] = (zc[triangles[i][0]][triangles[i][1]] + zc[triangles[i][2]][triangles[i][3]] + zc[triangles[i][4]][triangles[i][5]]);
            }

            // Painter's algorithm: sort triangles by depth and fill in order.
            // Conceptually simple, but it cannot render intersecting surfaces
            // correctly — adequate for height fields.
            for (int i = 0; i < order.length; i++) {
                order[i] = i;
            }
            QuickSort.sort(az, order);

            for (int i : order) {
                double avg = (data[triangles[i][0]][triangles[i][1]][2] + data[triangles[i][2]][triangles[i][3]][2] + data[triangles[i][4]][triangles[i][5]][2]) / 3.0;
                int k = (int) ((avg - min) / width);
                // FIX: clamp the palette index. avg == max maps to
                // palette.length, and a flat surface gives width == 0
                // (division yields Infinity); both previously could index
                // out of bounds.
                if (k < 0) {
                    k = 0;
                } else if (k >= palette.length) {
                    k = palette.length - 1;
                }
                g.setColor(palette[k]);
                g.fillPolygon(data[triangles[i][0]][triangles[i][1]], data[triangles[i][2]][triangles[i][3]], data[triangles[i][4]][triangles[i][5]]);
            }
        }

        g.setColor(c);
    }

    /**
     * Computes {lower, upper} coordinate bounds over an m x n x 3 grid.
     */
    private static double[][] bounds(double[][][] data) {
        double[] lo = {data[0][0][0], data[0][0][1], data[0][0][2]};
        double[] hi = {data[0][0][0], data[0][0][1], data[0][0][2]};
        for (double[][] row : data) {
            for (double[] p : row) {
                for (int d = 0; d < 3; d++) {
                    if (p[d] < lo[d]) {
                        lo[d] = p[d];
                    }
                    if (p[d] > hi[d]) {
                        hi[d] = p[d];
                    }
                }
            }
        }
        return new double[][] {lo, hi};
    }

    /**
     * Create a plot canvas with the 3D surface plot of given data.
     * @param z the z-axis values of surface. The x-axis and y-axis location of
     * surface will be set to 0.5, 1.5, 2.5, ...
     */
    public static PlotCanvas plot(double[][] z) {
        double[] lowerBound = {0, 0, Math.min(z)};
        double[] upperBound = {z.length, z[0].length, Math.max(z)};
        PlotCanvas canvas = new PlotCanvas(lowerBound, upperBound);
        canvas.add(new Surface(z));
        return canvas;
    }

    /**
     * Create a plot canvas with the 3D surface plot of given data.
     * @param z the z-axis values of surface. The x-axis and y-axis location of
     * surface will be set to 0.5, 1.5, 2.5, ...
     * @param palette the color palette.
     */
    public static PlotCanvas plot(double[][] z, Color[] palette) {
        double[] lowerBound = {0, 0, Math.min(z)};
        double[] upperBound = {z.length, z[0].length, Math.max(z)};
        PlotCanvas canvas = new PlotCanvas(lowerBound, upperBound);
        canvas.add(new Surface(z, palette));
        return canvas;
    }

    /**
     * Create a plot canvas with the 3D surface plot of given data.
     * @param x the x-axis values of surface.
     * @param y the y-axis values of surface.
     * @param z the z-axis values of surface.
     */
    public static PlotCanvas plot(double[] x, double[] y, double[][] z) {
        double[] lowerBound = {Math.min(x), Math.min(y), Math.min(z)};
        double[] upperBound = {Math.max(x), Math.max(y), Math.max(z)};
        PlotCanvas canvas = new PlotCanvas(lowerBound, upperBound);
        canvas.add(new Surface(x, y, z));
        return canvas;
    }

    /**
     * Create a plot canvas with the 3D surface plot of given data.
     * @param x the x-axis values of surface.
     * @param y the y-axis values of surface.
     * @param z the z-axis values of surface.
     * @param palette the color palette.
     */
    public static PlotCanvas plot(double[] x, double[] y, double[][] z, Color[] palette) {
        double[] lowerBound = {Math.min(x), Math.min(y), Math.min(z)};
        double[] upperBound = {Math.max(x), Math.max(y), Math.max(z)};
        PlotCanvas canvas = new PlotCanvas(lowerBound, upperBound);
        canvas.add(new Surface(x, y, z, palette));
        return canvas;
    }

    /**
     * Create a plot canvas with the 3D surface plot of given data.
     * @param data an m x n x 3 array which are coordinates of m x n surface.
     */
    public static PlotCanvas plot(double[][][] data) {
        double[][] b = bounds(data);
        PlotCanvas canvas = new PlotCanvas(b[0], b[1]);
        canvas.add(new Surface(data));
        return canvas;
    }

    /**
     * Create a plot canvas with the 3D surface plot of given data.
     * @param data an m x n x 3 array which are coordinates of m x n surface.
     * @param palette the color palette.
     */
    public static PlotCanvas plot(double[][][] data, Color[] palette) {
        double[][] b = bounds(data);
        PlotCanvas canvas = new PlotCanvas(b[0], b[1]);
        canvas.add(new Surface(data, palette));
        return canvas;
    }
}
| |
package org.codehaus.groovy.grails.orm.hibernate.support;
import grails.validation.ValidationException;
import groovy.lang.Closure;
import groovy.lang.GroovySystem;
import groovy.lang.MetaClass;
import groovy.lang.MetaMethod;
import groovy.lang.MetaProperty;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.codehaus.groovy.grails.commons.GrailsClassUtils;
import org.codehaus.groovy.grails.commons.GrailsDomainClassProperty;
import org.codehaus.groovy.grails.orm.hibernate.cfg.GrailsDomainBinder;
import org.codehaus.groovy.grails.orm.hibernate.cfg.Mapping;
import org.codehaus.groovy.grails.orm.hibernate.metaclass.AbstractDynamicPersistentMethod;
import org.codehaus.groovy.grails.orm.hibernate.metaclass.AbstractSavePersistentMethod;
import org.codehaus.groovy.grails.orm.hibernate.metaclass.ValidatePersistentMethod;
import org.codehaus.groovy.runtime.DefaultGroovyMethods;
import org.codehaus.groovy.runtime.typehandling.DefaultTypeTransformation;
import org.grails.plugins.timetravel.TimeTravel;
import org.hibernate.EntityMode;
import org.hibernate.FlushMode;
import org.hibernate.Session;
import org.hibernate.event.AbstractEvent;
import org.hibernate.event.PreInsertEvent;
import org.hibernate.event.PreUpdateEvent;
import org.hibernate.persister.entity.EntityPersister;
import org.springframework.util.ReflectionUtils;
import org.springframework.validation.Errors;
// http://grepcode.com/file/repo1.maven.org/maven2/org.grails/grails-hibernate/2.1.0.RC1/org/codehaus/groovy/grails/orm/hibernate/EventTriggeringInterceptor.java#EventTriggeringInterceptor.findEventListener%28java.lang.Object%29
// http://grepcode.com/file/repo1.maven.org/maven2/org.grails/grails-hibernate/2.1.0.RC1/org/codehaus/groovy/grails/orm/hibernate/support/ClosureEventListener.java#ClosureEventListener
// https://github.com/grails/grails-core/tree/2.1.x/grails-hibernate/src/main/groovy/org/codehaus/groovy/grails/orm/hibernate
/**
 * Hibernate pre-insert/pre-update listener modeled on Grails'
 * ClosureEventListener (see the grepcode links above), extended so the
 * timetravel plugin can supply explicit dateCreated/lastUpdated timestamps
 * via {@code TimeTravel.getNew}/{@code TimeTravel.getUpdated} instead of
 * always using the wall clock.
 */
public class TimetravelClosureEventListener extends ClosureEventListener {
    private static final long serialVersionUID = 1;
    private static final Log log = LogFactory.getLog(TimetravelClosureEventListener.class);
    private static final Object[] EMPTY_OBJECT_ARRAY = new Object[] {};
    // Invokers for the domain class' beforeInsert/beforeUpdate hooks; null when the class has none.
    EventTriggerCaller beforeInsertCaller;
    EventTriggerCaller preUpdateEventListener;
    // True when the domain class has dateCreated/lastUpdated and the mapping enables auto-timestamping.
    boolean shouldTimestamp = false;
    // True when validation failures during flush should raise ValidationException.
    boolean failOnErrorEnabled = false;
    MetaProperty dateCreatedProperty;
    MetaProperty lastUpdatedProperty;
    MetaProperty errorsProperty;
    MetaClass domainMetaClass;
    // The domain class' validate(Map) method, invoked on every insert/update.
    MetaMethod validateMethod;
    Map validateParams;
    /**
     * Caches the domain class' meta-properties, event hooks, and validation
     * settings so the per-event callbacks do no reflection lookups.
     */
    public TimetravelClosureEventListener(Class<?> domainClazz, boolean failOnError, List failOnErrorPackages) {
        super(domainClazz, failOnError, failOnErrorPackages);
        domainMetaClass = GroovySystem.getMetaClassRegistry().getMetaClass(domainClazz);
        dateCreatedProperty = domainMetaClass.getMetaProperty(GrailsDomainClassProperty.DATE_CREATED);
        lastUpdatedProperty = domainMetaClass.getMetaProperty(GrailsDomainClassProperty.LAST_UPDATED);
        if (dateCreatedProperty != null || lastUpdatedProperty != null) {
            // Auto-timestamp unless the GORM mapping explicitly disables it.
            Mapping m = GrailsDomainBinder.getMapping(domainClazz);
            shouldTimestamp = m == null || m.isAutoTimestamp();
            log.debug("" + domainClazz + ": " + shouldTimestamp);
        }
        beforeInsertCaller = buildCaller(domainClazz, ClosureEventTriggeringInterceptor.BEFORE_INSERT_EVENT);
        preUpdateEventListener = buildCaller(domainClazz, ClosureEventTriggeringInterceptor.BEFORE_UPDATE_EVENT);
        // Package list, when present, overrides the global failOnError flag.
        if (failOnErrorPackages.size() > 0) {
            failOnErrorEnabled = GrailsClassUtils.isClassBelowPackage(domainClazz, failOnErrorPackages);
        } else {
            failOnErrorEnabled = failOnError;
        }
        validateParams = new HashMap();
        validateParams.put(ValidatePersistentMethod.ARGUMENT_DEEP_VALIDATE, false);
        errorsProperty = domainMetaClass.getMetaProperty(AbstractDynamicPersistentMethod.ERRORS_PROPERTY);
        validateMethod = domainMetaClass.getMetaMethod(ValidatePersistentMethod.METHOD_SIGNATURE, new Object[] { Map.class });
    }
    /**
     * Runs the beforeUpdate hook, stamps lastUpdated (honoring a timetravel
     * override when set), and validates. Returns true to veto/evict the update.
     */
    @Override
    public boolean onPreUpdate(final PreUpdateEvent event) {
        log.debug( "%% PRE UPDATE" );
        return doWithManualSession(event, new Closure<Boolean>(this) {
            @Override
            public Boolean call() {
                Object entity = event.getEntity();
                boolean evict = false;
                if (preUpdateEventListener != null) {
                    evict = preUpdateEventListener.call(entity);
                    // The hook may have mutated the entity; mirror it into Hibernate's state array.
                    synchronizePersisterState(entity, event.getPersister(), event.getState());
                }
                if (lastUpdatedProperty != null && shouldTimestamp) {
                    // Timetravel override takes precedence over the current clock.
                    long time = System.currentTimeMillis();
                    if (TimeTravel.getUpdated(entity) != null) {
                        time = ((Date)TimeTravel.getUpdated(entity)).getTime();
                        log.debug("Updated time: " + TimeTravel.getUpdated(entity));
                    }
                    Object now = DefaultGroovyMethods.newInstance(lastUpdatedProperty.getType(), new Object[] { time });
                    // Write both the persister state (what gets flushed) and the entity itself.
                    event.getState()[ArrayUtils.indexOf(event.getPersister().getPropertyNames(), GrailsDomainClassProperty.LAST_UPDATED)] = now;
                    lastUpdatedProperty.setProperty(entity, now);
                }
                if (!AbstractSavePersistentMethod.isAutoValidationDisabled(entity)
                        && !DefaultTypeTransformation.castToBoolean(validateMethod.invoke(entity,
                                new Object[] { validateParams }))) {
                    evict = true;
                    if (failOnErrorEnabled) {
                        Errors errors = (Errors) errorsProperty.getProperty(entity);
                        throw new ValidationException("Validation error whilst flushing entity [" + entity.getClass().getName()
                                + "]", errors);
                    }
                }
                return evict;
            }
        });
    }
    /**
     * Runs the beforeInsert hook, stamps dateCreated/lastUpdated (honoring
     * timetravel overrides), and validates. Returns true to veto/evict.
     */
    @Override
    public boolean onPreInsert(final PreInsertEvent event) {
        log.debug( "%% PRE INSERT " + event.getEntity() + " (" + shouldTimestamp + ")");
        return doWithManualSession(event, new Closure<Boolean>(this) {
            @Override
            public Boolean call() {
                Object entity = event.getEntity();
                boolean synchronizeState = false;
                if (beforeInsertCaller != null) {
                    // A truthy veto from beforeInsert cancels the insert outright.
                    if (beforeInsertCaller.call(entity)) {
                        return true;
                    }
                    synchronizeState = true;
                }
                if (shouldTimestamp) {
                    // Separate clocks for created vs. updated, each overridable by timetravel.
                    long updateTime = System.currentTimeMillis();
                    long newTime = System.currentTimeMillis();
                    log.debug(">> PRE-INSERT: " + entity);
                    if (TimeTravel.getUpdated(entity) != null) {
                        updateTime = ((Date)TimeTravel.getUpdated(entity)).getTime();
                    }
                    if (TimeTravel.getNew(entity) != null) {
                        newTime = ((Date)TimeTravel.getNew(entity)).getTime();
                    }
                    if (dateCreatedProperty != null) {
                        Object now = DefaultGroovyMethods.newInstance(dateCreatedProperty.getType(), new Object[] { newTime });
                        dateCreatedProperty.setProperty(entity, now);
                        synchronizeState = true;
                    }
                    if (lastUpdatedProperty != null) {
                        Object now = DefaultGroovyMethods.newInstance(lastUpdatedProperty.getType(), new Object[] { updateTime });
                        lastUpdatedProperty.setProperty(entity, now);
                        synchronizeState = true;
                    }
                }
                if (synchronizeState) {
                    // Push entity mutations into the state array Hibernate will flush.
                    synchronizePersisterState(entity, event.getPersister(), event.getState());
                }
                boolean evict = false;
                if (!AbstractSavePersistentMethod.isAutoValidationDisabled(entity)
                        && !DefaultTypeTransformation.castToBoolean(validateMethod.invoke(entity,
                                new Object[] { validateParams }))) {
                    evict = true;
                    if (failOnErrorEnabled) {
                        Errors errors = (Errors) errorsProperty.getProperty(entity);
                        throw new ValidationException("Validation error whilst flushing entity [" + entity.getClass().getName()
                                + "]", errors);
                    }
                }
                return evict;
            }
        });
    }
    /**
     * Resolves the event hook named {@code event} on the domain class, trying
     * (in order): a Java method, a Closure field, a Groovy meta-method, then a
     * Closure meta-property. Returns null when no hook exists.
     */
    private EventTriggerCaller buildCaller(Class<?> domainClazz, String event) {
        Method method = ReflectionUtils.findMethod(domainClazz, event);
        if (method != null) {
            ReflectionUtils.makeAccessible(method);
            return new MethodCaller(method);
        }
        Field field = ReflectionUtils.findField(domainClazz, event);
        if (field != null) {
            ReflectionUtils.makeAccessible(field);
            return new FieldClosureCaller(field);
        }
        MetaMethod metaMethod = domainMetaClass.getMetaMethod(event, EMPTY_OBJECT_ARRAY);
        if (metaMethod != null) {
            return new MetaMethodCaller(metaMethod);
        }
        MetaProperty metaProperty = domainMetaClass.getMetaProperty(event);
        if (metaProperty != null) {
            return new MetaPropertyClosureCaller(metaProperty);
        }
        return null;
    }
    /**
     * Copies every (non-ignored) entity property value into Hibernate's
     * per-event state array and the persister, so hook-made changes are
     * flushed. Order matters: state[] and the persister must agree.
     */
    private void synchronizePersisterState(Object entity, EntityPersister persister, Object[] state) {
        String[] propertyNames = persister.getPropertyNames();
        for (int i = 0; i < propertyNames.length; i++) {
            String p = propertyNames[i];
            MetaProperty metaProperty = domainMetaClass.getMetaProperty(p);
            if (ClosureEventTriggeringInterceptor.IGNORED.contains(p) || metaProperty == null) {
                continue;
            }
            Object value = metaProperty.getProperty(entity);
            state[i] = value;
            persister.setPropertyValue(entity, i, value, EntityMode.POJO);
        }
    }
    /**
     * Runs {@code callable} with the session in MANUAL flush mode (so hooks
     * cannot trigger a re-entrant flush), restoring the previous mode after.
     */
    private <T> T doWithManualSession(AbstractEvent event, Closure<T> callable) {
        Session session = event.getSession();
        FlushMode current = session.getFlushMode();
        try {
            session.setFlushMode(FlushMode.MANUAL);
            return callable.call();
        } finally {
            session.setFlushMode(current);
        }
    }
    /** Strategy for invoking a domain-class event hook. */
    private static abstract class EventTriggerCaller {
        public abstract boolean call(Object entity);
        // Hook returning Boolean.FALSE means "veto"; anything else means proceed.
        boolean resolveReturnValue(Object retval) {
            if (retval instanceof Boolean) {
                return !((Boolean) retval).booleanValue();
            }
            return false;
        }
    }
    /** Invokes a plain Java method hook via reflection. */
    private static class MethodCaller extends EventTriggerCaller {
        Method method;
        MethodCaller(Method method) {
            this.method = method;
        }
        @Override
        public boolean call(Object entity) {
            Object retval = ReflectionUtils.invokeMethod(method, entity);
            return resolveReturnValue(retval);
        }
    }
    /** Invokes a Groovy meta-method hook. */
    private static class MetaMethodCaller extends EventTriggerCaller {
        MetaMethod method;
        MetaMethodCaller(MetaMethod method) {
            this.method = method;
        }
        @Override
        public boolean call(Object entity) {
            Object retval = method.invoke(entity, EMPTY_OBJECT_ARRAY);
            return resolveReturnValue(retval);
        }
    }
    /** Base for Closure-valued hooks; delegates the closure to the entity. */
    private static abstract class ClosureCaller extends EventTriggerCaller {
        // Static closures are shared across instances, so clone before mutating delegate.
        boolean cloneFirst = false;
        Object callClosure(Object entity, Closure callable) {
            if (cloneFirst) {
                callable = (Closure)callable.clone();
            }
            callable.setResolveStrategy(Closure.DELEGATE_FIRST);
            callable.setDelegate(entity);
            return callable.call();
        }
    }
    /** Hook stored as a Closure in a Java field. */
    private static class FieldClosureCaller extends ClosureCaller {
        Field field;
        FieldClosureCaller(Field field) {
            this.field = field;
            if (Modifier.isStatic(field.getModifiers())) {
                cloneFirst = true;
            }
        }
        @Override
        public boolean call(Object entity) {
            Object fieldval = ReflectionUtils.getField(field, entity);
            if (fieldval instanceof Closure) {
                return resolveReturnValue(callClosure(entity, (Closure) fieldval));
            }
            log.error("Field " + field + " is not Closure or method.");
            return false;
        }
    }
    /** Hook stored as a Closure in a Groovy meta-property. */
    private static class MetaPropertyClosureCaller extends ClosureCaller {
        MetaProperty metaProperty;
        MetaPropertyClosureCaller(MetaProperty metaProperty) {
            this.metaProperty = metaProperty;
            if (Modifier.isStatic(metaProperty.getModifiers())) {
                cloneFirst = true;
            }
        }
        @Override
        public boolean call(Object entity) {
            Object fieldval = metaProperty.getProperty(entity);
            if (fieldval instanceof Closure) {
                return resolveReturnValue(callClosure(entity, (Closure) fieldval));
            }
            log.error("Field " + metaProperty + " is not Closure.");
            return false;
        }
    }
}
| |
package org.aipim.web.service.dao;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.aipim.web.service.SyncService;
import org.aipim.web.service.UserUtil;
import org.aipim.web.service.connection.*;
import org.aipim.web.service.domain.*;
import org.apache.log4j.Logger;
import org.json.JSONException;
import org.json.JSONObject;
/**
 * DAO for the {@code tasks} table. Each DB-touching method closes the shared
 * connection when done (preserved from the original design), so callers are
 * expected to obtain a fresh instance per operation via {@link #getInstance}.
 */
public class TaskManager {
    private final static Logger logger = Logger.getLogger(TaskManager.class);
    // Opened in the constructor; closed by each DB-touching method's finally.
    private java.sql.Connection connection;

    public TaskManager() {
        this(null, true);
    }

    public TaskManager(String _appToken, boolean _staging) {
        this.connection = Connection.getConnection(_appToken, _staging);
    }

    /**
     * Returns a TaskManager for the given task; a null task falls back to
     * token "1" on the staging database.
     */
    public static TaskManager getInstance(Task task) {
        if (task == null) {
            task = new Task();
            task.setToken("1");
            task.setStaging(true);
        }
        return new TaskManager(task.getToken(), task.getStaging());
    }

    /**
     * Persists the task, then dispatches it to the handler matching its
     * sync message. Unknown or null messages are returned unchanged.
     */
    public static Task notify(final Task task) {
        TaskManager taskManager = getInstance(task);
        String message = task.getMessage();
        if (message == null) {
            // FIX: switch-on-String throws NPE for null; treat it as "no-op".
            return task;
        }
        switch (message) {
            case SyncService.SYNC_GET_TOKEN:
                taskManager.getToken(taskManager.newTask(task));
                break;
            case SyncService.SYNC_GET_USERS:
                taskManager.getUsers(taskManager.newTask(task));
                break;
            case SyncService.SYNC_SET_USERS:
                taskManager.setUsers(taskManager.newTask(task));
                break;
            case SyncService.SYNC_GET_PRODUCTS:
                taskManager.getProducts(taskManager.newTask(task));
                break;
            case SyncService.SYNC_SET_PRODUCTS:
                taskManager.setProducts(taskManager.newTask(task));
                break;
            case SyncService.SYNC_GET_CARTS:
                taskManager.getCarts(taskManager.newTask(task));
                break;
            case SyncService.SYNC_SET_CARTS:
                taskManager.setCarts(taskManager.newTask(task));
                break;
            case SyncService.SYNC_GET_TASKS:
                taskManager.getTasks(taskManager.newTask(task));
                break;
            case SyncService.SYNC_SET_TASKS:
                taskManager.setTasks(taskManager.newTask(task));
                break;
        }
        return task;
    }

    /**
     * Returns the string value of {@code key}, or "" when the key is absent,
     * JSON-null, or not a string (mirrors the original nested ternaries).
     */
    private static String optString(JSONObject obj, String key) throws JSONException {
        if (obj.has(key) && !obj.isNull(key) && obj.get(key) instanceof String) {
            return obj.getString(key);
        }
        return "";
    }

    /**
     * Authenticates (or auto-registers) the user described by the task's JSON
     * request and stores the resulting user in the task response.
     * The request is never logged: it can contain a plaintext password.
     */
    public Task getToken(final Task task) {
        String json = task.getRequest();
        if (json == null || json.isEmpty()) {
            return task;
        }
        JSONObject request = null;
        try {
            request = new JSONObject(json);
        } catch (JSONException e) {
            e.printStackTrace();
        }
        if (request == null) {
            return task;
        }
        // FIX: removed the "#TEST#" System.out dump of the raw request — it
        // leaked credentials (password field) to stdout.
        try {
            String uniqueId = optString(request, "uniqueId");
            String password = optString(request, "password");
            String authbind = optString(request, "authbind");
            String name = optString(request, "name");
            String email = optString(request, "email");
            UserManager userManager = new UserManager(task.getToken(), task.getStaging());
            User user = new User();
            if (!uniqueId.isEmpty() && !password.isEmpty() && userManager.checkCredentials(uniqueId, password)) {
                // Classic credential login.
                user = userManager.getUserByUniqueId(uniqueId);
            } else if (!uniqueId.isEmpty() && !authbind.isEmpty()) {
                if (userManager.checkAuthBindUniqueIdExists(uniqueId, authbind)) {
                    // Known external-auth binding: refresh profile fields.
                    user = userManager.getUserByUniqueId(uniqueId);
                    user.setName(name);
                    user.setEmail(email);
                    userManager.updateUser(user);
                } else if (userManager.checkUniqueIdExists(uniqueId)) {
                    // Existing user, new binding: attach it.
                    user = userManager.getUserByUniqueId(uniqueId);
                    user.setAuthbind(authbind);
                    user.setName(name);
                    user.setEmail(email);
                    userManager.updateUser(user);
                } else {
                    // Unknown user: auto-register from the external identity.
                    user.setUniqueId(uniqueId);
                    user.setName(name);
                    user.setEmail(email);
                    user.setAlternativeEmail(email);
                    // NOTE(review): hard-coded password prefix — confirm this
                    // derived password scheme is intentional.
                    UserUtil.setNewPassword(user, "fOsSoF" + authbind);
                    if (userManager.insertUser(user)) {
                        user = userManager.getUserByUniqueId(uniqueId);
                    }
                }
            }
            task.setUrl(null);
            task.setTitle(null);
            task.setSubtitle(null);
            task.setResponse(user.toString());
            task.setRequest(null);
            task.setStatus(Task.TASK_STATUS_DONE);
        } catch (JSONException e) {
            e.printStackTrace();
        }
        return task;
    }

    // The handlers below are not yet implemented; each echoes the task unchanged.
    public Task getUsers(final Task task) {
        return task;
    }

    public Task setUsers(final Task task) {
        return task;
    }

    public Task getProducts(final Task task) {
        return task;
    }

    public Task setProducts(final Task task) {
        return task;
    }

    public Task getCarts(final Task task) {
        return task;
    }

    public Task setCarts(final Task task) {
        return task;
    }

    public Task getTasks(final Task task) {
        return task;
    }

    public Task setTasks(final Task task) {
        return task;
    }

    /**
     * Inserts the task as pending (with a uniquified message) and re-reads
     * the stored row back into {@code task}.
     */
    public Task newTask(final Task task) {
        task.setStatus(Task.TASK_STATUS_PENDING);
        task.setArchived(Task.TASK_NOT_ARCHIVED);
        // Uniquify the message so the freshly inserted row can be re-read by it.
        task.setMessage(task.getMessage() + ":" + System.currentTimeMillis());
        try {
            // FIX: statement is now closed (try-with-resources); it used to leak.
            try (PreparedStatement preparedStatement = connection
                    .prepareStatement("insert into tasks "
                            + "(token, message, status, request, start_time, archived) "
                            + "values "
                            + "(?,?,?,?,now(),0);")) {
                preparedStatement.setString(1, task.getToken());
                preparedStatement.setString(2, task.getMessage());
                preparedStatement.setString(3, task.getStatus());
                preparedStatement.setString(4, task.getRequest());
                preparedStatement.executeUpdate();
            }
            logger.info("Task inserted: [" + task.getMessage() + "]");
            // Re-read the stored row (this call closes the shared connection).
            getTaskByMessage(task, task.getMessage());
        } catch (SQLException e) {
            e.printStackTrace();
            logger.error(e.toString());
        } finally {
            closeConnection();
        }
        return task;
    }

    /**
     * Loads the newest non-archived task with the given message into
     * {@code task}. Closes the shared connection.
     */
    public Task getTaskByMessage(final Task task, String message) {
        try (PreparedStatement preparedStatement = connection.prepareStatement(
                "select * from tasks where message = ? and archived=0 order by uid desc limit 1;")) {
            preparedStatement.setString(1, message);
            try (ResultSet rs = preparedStatement.executeQuery()) {
                if (rs.next()) {
                    setTaskResult(task, rs);
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
            logger.error(e.toString());
        } finally {
            closeConnection();
        }
        return task;
    }

    /**
     * Loads the task with the given uid into {@code task}. Closes the shared
     * connection.
     */
    public Task getTaskById(final Task task, int uid) {
        try (PreparedStatement preparedStatement = connection.prepareStatement(
                "select * from tasks where uid=?")) {
            preparedStatement.setInt(1, uid);
            try (ResultSet rs = preparedStatement.executeQuery()) {
                if (rs.next()) {
                    setTaskResult(task, rs);
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
            logger.error(e.toString());
        } finally {
            closeConnection();
        }
        return task;
    }

    /**
     * Copies the current result-set row into {@code task}.
     * @return true when every column was copied successfully.
     */
    public boolean setTaskResult(final Task task, final ResultSet resultSet) {
        boolean done = false;
        try {
            task.setUid(resultSet.getLong("uid"));
            task.setToken(resultSet.getString("token"));
            task.setMessage(resultSet.getString("message"));
            task.setStatus(resultSet.getString("status"));
            task.setRequest(resultSet.getString("request"));
            task.setResponse(resultSet.getString("response"));
            task.setStartTime(resultSet.getDate("start_time"));
            task.setEndTime(resultSet.getDate("end_time"));
            task.setArchived(resultSet.getInt("archived"));
            done = true;
        } catch (Exception e) {
            e.printStackTrace();
            logger.error(e.toString());
        }
        return done;
    }

    /**
     * Closes the shared connection, ignoring errors (it may already have been
     * closed by a nested call — matches the original code's behavior).
     */
    private void closeConnection() {
        if (this.connection != null) {
            try {
                this.connection.close();
            } catch (Exception ignored) {
                // deliberately ignored, as in the original code
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.composites;
import java.io.DataInput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Comparator;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.cql3.CQL3Row;
import org.apache.cassandra.cql3.ColumnIdentifier;
import org.apache.cassandra.db.Cell;
import org.apache.cassandra.db.ColumnSerializer;
import org.apache.cassandra.db.OnDiskAtom;
import org.apache.cassandra.db.filter.IDiskAtomFilter;
import org.apache.cassandra.db.filter.NamesQueryFilter;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.CollectionType;
import org.apache.cassandra.db.marshal.ColumnToCollectionType;
import org.apache.cassandra.io.ISerializer;
import org.apache.cassandra.io.IVersionedSerializer;
/**
 * The type of CellNames.
 *
 * In the same way that a CellName is a Composite, a CellNameType is a CType, but
 * with a number of methods specific to cell names.
 *
 * On top of the dichotomy simple/truly-composite of composites, cell names come
 * in 2 variants: "dense" and "sparse". The sparse ones are CellNames where one of
 * the components (the last, or second-to-last for collections) is used to store the
 * CQL3 column name. Dense are those for which it's not the case.
 *
 * In other words, we have 4 types of CellName/CellNameType which correspond to the
 * 4 types of table layout that we need to distinguish:
 *   1. Simple (non-truly-composite) dense: this is the dynamic thrift CFs whose
 *      comparator is not composite.
 *   2. Composite dense: this is the dynamic thrift CFs with a CompositeType comparator.
 *   3. Simple (non-truly-composite) sparse: this is the thrift static CFs (that
 *      don't have a composite comparator).
 *   4. Composite sparse: this is the CQL3 layout (note that this is the only one that
 *      supports collections).
 */
public interface CellNameType extends CType
{
    /**
     * Whether or not the cell names for this type are dense.
     */
    public boolean isDense();

    /**
     * The number of clustering columns for the table this is the type of.
     */
    public int clusteringPrefixSize();

    /**
     * A builder for the clustering prefix.
     */
    public CBuilder prefixBuilder();

    /**
     * The prefix to use for static columns.
     *
     * Note that the create() methods below for creating a CellName automatically handle
     * static columns already for convenience, so there is no need to pass this prefix
     * for them. There are a few other cases where we need the prefix directly however.
     */
    public Composite staticPrefix();

    /**
     * Whether or not there are some collections defined in this type.
     */
    public boolean hasCollections();

    /**
     * Whether or not this type layout supports collections.
     */
    public boolean supportCollections();

    /**
     * The type of the collections (or null if the type does not have any non-frozen collections).
     */
    public ColumnToCollectionType collectionType();

    /**
     * Return the new type obtained by adding/updating to the new collection type for the provided column name
     * to this type.
     */
    public CellNameType addOrUpdateCollection(ColumnIdentifier columnName, CollectionType newCollection);

    /**
     * Returns a new CellNameType that is equivalent to this one but with one
     * of the subtypes replaced by the provided new type.
     */
    @Override
    public CellNameType setSubtype(int position, AbstractType<?> newType);

    /**
     * Creates a row marker for the CQL3 row having the provided clustering prefix.
     *
     * Note that this is only valid for CQL3 tables (isCompound() and !isDense()) and should
     * only be called for them.
     */
    public CellName rowMarker(Composite prefix);

    /**
     * Creates a new CellName given a clustering prefix and a CQL3 column.
     *
     * Note that for dense types, the column can be null as a shortcut for designating the only
     * COMPACT_VALUE column of the table.
     */
    public CellName create(Composite prefix, ColumnDefinition column);

    /**
     * Creates a new collection CellName given a clustering prefix, a CQL3 column and the collection element.
     */
    public CellName create(Composite prefix, ColumnDefinition column, ByteBuffer collectionElement);

    /**
     * Convenience method to create a cell name given its components.
     *
     * This is equivalent to CType#make() but returns a full cell name (and thus
     * requires all the components of the name).
     */
    public CellName makeCellName(Object... components);

    /**
     * Deserialize a Composite from a ByteBuffer.
     *
     * This is equivalent to CType#fromByteBuffer but assumes the buffer is a full cell
     * name. This is meant for thrift to convert the fully serialized buffer we
     * get from the clients.
     */
    public CellName cellFromByteBuffer(ByteBuffer bb);

    /**
     * Creates a new CQL3Row builder for this type. See CQL3Row for details.
     */
    public CQL3Row.Builder CQL3RowBuilder(CFMetaData metadata, long now);

    // The two following methods are used to pass the declared regular column names (in CFMetaData)
    // to the CellNameType. This is only used for optimization's sake, see SparseCellNameType.
    public void addCQL3Column(ColumnIdentifier id);
    public void removeCQL3Column(ColumnIdentifier id);

    /**
     * Creates a new Deserializer. This is used by AtomDeserializer to do incremental and on-demand
     * deserialization of the on-disk atoms. See AtomDeserializer for details.
     */
    public Deserializer newDeserializer(DataInput in);

    /*
     * Same as in CType, follows a number of per-CellNameType instances for the Comparator and Serializer used
     * throughout the code (those that require full CellName versus just Composite).
     */

    // Ultimately, those might be split into an IVersionedSerializer and an ISSTableSerializer
    public ISerializer<CellName> cellSerializer();

    public Comparator<Cell> columnComparator(boolean isRightNative);
    public Comparator<Object> asymmetricColumnComparator(boolean isRightNative);
    public Comparator<Cell> columnReverseComparator();
    public Comparator<OnDiskAtom> onDiskAtomComparator();

    public ColumnSerializer columnSerializer();
    public OnDiskAtom.Serializer onDiskAtomSerializer();
    public IVersionedSerializer<NamesQueryFilter> namesQueryFilterSerializer();
    public IVersionedSerializer<IDiskAtomFilter> diskAtomFilterSerializer();

    public interface Deserializer
    {
        /**
         * Whether this deserializer is done or not, i.e. whether we've reached the end-of-row marker.
         */
        public boolean hasNext() throws IOException;

        /**
         * Whether or not some name has been read but not consumed by readNext.
         */
        public boolean hasUnprocessed() throws IOException;

        /**
         * Compare the next name to read to the provided Composite.
         * This does not consume the next name.
         */
        public int compareNextTo(Composite composite) throws IOException;

        /**
         * Actually consume the next name and return it.
         */
        public Composite readNext() throws IOException;

        /**
         * Skip the next name (consuming it).
         */
        public void skipNext() throws IOException;
    }
}
| |
/**
* $RCSfile: $
* $Revision: $
* $Date: $
*
* Copyright (C) 2005-2008 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.openfire.cluster;
import java.util.Collection;
import java.util.Queue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.LinkedBlockingQueue;
import org.jivesoftware.openfire.XMPPServer;
import org.jivesoftware.util.JiveGlobals;
import org.jivesoftware.util.JiveProperties;
import org.jivesoftware.util.cache.CacheFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A cluster manager is responsible for triggering events related to clustering.
 * A future version will also provide statistics about the cluster.
 *
 * @author Gaston Dombiak
 */
public class ClusterManager {

    private static final Logger Log = LoggerFactory.getLogger(ClusterManager.class);

    /**
     * Name of the XML property that controls whether clustering is enabled.
     * Kept public and non-final for backwards compatibility with existing callers.
     */
    public static String CLUSTER_PROPERTY_NAME = "clustering.enabled";

    private static Queue<ClusterEventListener> listeners = new ConcurrentLinkedQueue<ClusterEventListener>();

    // Bounded queue of pending cluster events, drained by the dedicated daemon
    // thread started below. put() blocks if more than 10000 events are pending.
    private static BlockingQueue<Event> events = new LinkedBlockingQueue<Event>(10000);

    static {
        Thread thread = new Thread("ClusterManager events dispatcher") {
            @Override
            public void run() {
                for (;;) {
                    Event event;
                    try {
                        event = events.take();
                    } catch (InterruptedException e) {
                        // Restore the interrupt flag and stop dispatching: looping
                        // again would make take() throw immediately and busy-spin.
                        Log.warn(e.getMessage(), e);
                        Thread.currentThread().interrupt();
                        return;
                    }
                    try {
                        dispatch(event);
                    } catch (Exception e) {
                        Log.error(e.getMessage(), e);
                    } finally {
                        // Always mark the event as processed - even when dispatching
                        // failed - so synchronous callers blocked in
                        // fireJoinedCluster(false) cannot wait forever.
                        event.setProcessed(true);
                    }
                }
            }
        };
        thread.setDaemon(true);
        thread.start();
    }

    /**
     * Delivers a single event: first to CacheFactory (so cache structures are
     * updated before anyone else reacts), then to every registered listener.
     * A failing listener is logged and does not prevent delivery to the rest.
     *
     * @param event the event to deliver.
     */
    private static void dispatch(Event event) {
        EventType eventType = event.getType();
        // Make sure that CacheFactory is getting this event first (to update cache structure)
        if (eventType == EventType.joined_cluster && event.getNodeID() == null) {
            // Replace standalone caches with clustered caches. Local cached data is not moved.
            CacheFactory.joinedCluster();
        }
        // Now notify rest of the listeners
        for (ClusterEventListener listener : listeners) {
            try {
                switch (eventType) {
                    case joined_cluster: {
                        if (event.getNodeID() == null) {
                            listener.joinedCluster();
                        }
                        else {
                            listener.joinedCluster(event.getNodeID());
                        }
                        break;
                    }
                    case left_cluster: {
                        if (event.getNodeID() == null) {
                            listener.leftCluster();
                        }
                        else {
                            listener.leftCluster(event.getNodeID());
                        }
                        break;
                    }
                    case marked_senior_cluster_member: {
                        listener.markedAsSeniorClusterMember();
                        break;
                    }
                    default:
                        break;
                }
            }
            catch (Exception e) {
                Log.error(e.getMessage(), e);
            }
        }
    }

    /**
     * Queues an event for the dispatcher thread and, when requested, blocks the
     * calling thread until the event has been fully processed.
     *
     * @param event        the event to queue.
     * @param asynchronous false to block until the event has been dispatched.
     */
    private static void fireEvent(Event event, boolean asynchronous) {
        try {
            events.put(event);
            if (!asynchronous) {
                while (!event.isProcessed()) {
                    Thread.sleep(50);
                }
            }
        } catch (InterruptedException e) {
            // Should never happen. Log it and restore the interrupt flag for
            // callers that care about interruption.
            Log.error(e.getMessage(), e);
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Registers a listener to receive events.
     *
     * @param listener the listener.
     * @throws NullPointerException if the listener is null.
     */
    public static void addListener(ClusterEventListener listener) {
        if (listener == null) {
            throw new NullPointerException();
        }
        listeners.add(listener);
    }

    /**
     * Unregisters a listener to receive events.
     *
     * @param listener the listener.
     */
    public static void removeListener(ClusterEventListener listener) {
        listeners.remove(listener);
    }

    /**
     * Triggers event indicating that this JVM is now part of a cluster. At this point the
     * {@link org.jivesoftware.openfire.XMPPServer#getNodeID()} holds the new nodeID value and
     * the old nodeID value is passed in case the listener needs it.<p>
     * <p/>
     * When joining the cluster as the senior cluster member the {@link #fireMarkedAsSeniorClusterMember()}
     * event will be sent right after this event.<p>
     * <p/>
     * This event will be triggered in another thread. This will avoid potential deadlocks
     * in Coherence.
     *
     * @param asynchronous true if event will be triggered in background
     */
    public static void fireJoinedCluster(boolean asynchronous) {
        fireEvent(new Event(EventType.joined_cluster, null), asynchronous);
    }

    /**
     * Triggers event indicating that another JVM is now part of a cluster.<p>
     *
     * This event will be triggered in another thread. This will avoid potential deadlocks
     * in Coherence.
     *
     * @param nodeID nodeID assigned to the JVM when joining the cluster.
     * @param asynchronous true if event will be triggered in background
     */
    public static void fireJoinedCluster(byte[] nodeID, boolean asynchronous) {
        fireEvent(new Event(EventType.joined_cluster, nodeID), asynchronous);
    }

    /**
     * Triggers event indicating that this JVM is no longer part of the cluster. This could
     * happen when disabling clustering support or removing the enterprise plugin that provides
     * clustering support.<p>
     *
     * Moreover, if we were in a "split brain" scenario (ie. separated cluster islands) and the
     * island where this JVM belonged was marked as "old" then all nodes of that island will
     * get the <tt>left cluster event</tt> and <tt>joined cluster events</tt>. That means that
     * caches will be reset and thus will need to be repopulated again with fresh data from this JVM.
     * This also includes the case where this JVM was the senior cluster member and when the islands
     * met again then this JVM stopped being the senior member.<p>
     *
     * Note: unlike the other fire methods, this one notifies listeners synchronously on the
     * calling thread rather than going through the event queue.
     */
    public static void fireLeftCluster() {
        // Now notify rest of the listeners
        for (ClusterEventListener listener : listeners) {
            try {
                listener.leftCluster();
            }
            catch (Exception e) {
                Log.error(e.getMessage(), e);
            }
        }
    }

    /**
     * Triggers event indicating that another JVM is no longer part of the cluster. This could
     * happen when disabling clustering support or removing the enterprise plugin that provides
     * clustering support.
     *
     * @param nodeID nodeID assigned to the JVM when joining the cluster.
     */
    public static void fireLeftCluster(byte[] nodeID) {
        fireEvent(new Event(EventType.left_cluster, nodeID), true);
    }

    /**
     * Triggers event indicating that this JVM is now the senior cluster member. This
     * could either happen when initially joining the cluster or when the senior cluster
     * member node left the cluster and this JVM was marked as the new senior cluster member.<p>
     * <p/>
     * Moreover, in the case of a "split brain" scenario (ie. separated cluster islands) each
     * island will have its own senior cluster member. However, when the islands meet again there
     * can only be one senior cluster member, so one of the senior cluster members will stop playing
     * that role. When that happens the JVM no longer playing that role will receive the
     * {@link #fireLeftCluster()} and {@link #fireJoinedCluster(boolean)} events.<p>
     * <p/>
     * This event will be triggered in another thread. This will avoid potential deadlocks
     * in Coherence.
     */
    public static void fireMarkedAsSeniorClusterMember() {
        // Previously a failed put() was silently swallowed; fireEvent() now logs it.
        fireEvent(new Event(EventType.marked_senior_cluster_member, null), true);
    }

    /**
     * Starts the cluster service if clustering is enabled. The process of starting clustering
     * will recreate caches as distributed caches.<p>
     *
     * Before starting a cluster the
     * {@link XMPPServer#setRemoteSessionLocator(org.jivesoftware.openfire.session.RemoteSessionLocator)} and
     * {@link org.jivesoftware.openfire.RoutingTable#setRemotePacketRouter(org.jivesoftware.openfire.RemotePacketRouter)}
     * need to be properly configured.
     *
     * @throws IllegalStateException if clustering is enabled but the session locator or
     *         remote packet router have not been configured yet.
     */
    public static synchronized void startup() {
        if (isClusteringStarted()) {
            return;
        }
        // See if clustering should be enabled.
        if (isClusteringEnabled()) {
            if (XMPPServer.getInstance().getRemoteSessionLocator() == null) {
                throw new IllegalStateException("No RemoteSessionLocator was found.");
            }
            if (XMPPServer.getInstance().getRoutingTable().getRemotePacketRouter() == null) {
                throw new IllegalStateException("No RemotePacketRouter was found.");
            }
            // Start up the cluster and reset caches
            CacheFactory.startClustering();
        }
    }

    /**
     * Shuts down the clustering service. This method should be called when the Jive
     * system is shutting down, and must not be called otherwise. Failing to call
     * this method may temporarily impact cluster performance, as the system will
     * have to do extra work to recover from a non-clean shutdown.
     * If clustering is not enabled, this method will do nothing.
     */
    public static synchronized void shutdown() {
        // Reset packet router to use to deliver packets to remote cluster nodes
        XMPPServer.getInstance().getRoutingTable().setRemotePacketRouter(null);
        if (isClusteringStarted()) {
            Log.debug("ClusterManager: Shutting down clustered cache service.");
            CacheFactory.stopClustering();
        }
        // Reset the session locator to use
        XMPPServer.getInstance().setRemoteSessionLocator(null);
    }

    /**
     * Sets true if clustering support is enabled. An attempt to start or join
     * an existing cluster will be made if the service was enabled. On the
     * other hand, if disabled then this JVM will leave or stop the cluster.
     *
     * @param enabled if clustering support is enabled.
     */
    public static void setClusteringEnabled(boolean enabled) {
        if (enabled) {
            // Check that clustering is not already enabled and we are already in a cluster
            if (isClusteringEnabled() && isClusteringStarted()) {
                return;
            }
        }
        else {
            // Check that clustering is disabled
            if (!isClusteringEnabled()) {
                return;
            }
        }
        JiveGlobals.setXMLProperty(CLUSTER_PROPERTY_NAME, Boolean.toString(enabled));
        if (!enabled) {
            shutdown();
        }
        else {
            // Reload Jive properties. This will ensure that this node's copy of the
            // properties starts correct.
            JiveProperties.getInstance().init();
            startup();
        }
    }

    /**
     * Returns true if clustering support is enabled. This does not mean
     * that clustering has started or that clustering will be able to start.
     *
     * @return true if clustering support is enabled.
     */
    public static boolean isClusteringEnabled() {
        return JiveGlobals.getXMLProperty(CLUSTER_PROPERTY_NAME, false);
    }

    /**
     * Returns true if clustering is installed and can be used by this JVM
     * to join a cluster. A false value could mean that either clustering
     * support is not available or the license does not allow to have more
     * than 1 cluster node.
     *
     * @return true if clustering is installed and can be used by
     * this JVM to join a cluster.
     */
    public static boolean isClusteringAvailable() {
        return CacheFactory.isClusteringAvailable();
    }

    /**
     * Returns true if clustering is currently being started. Once the cluster
     * is started or has failed to start this value will be false.
     *
     * @return true if clustering is currently being started.
     */
    public static boolean isClusteringStarting() {
        return CacheFactory.isClusteringStarting();
    }

    /**
     * Returns true if this JVM is part of a cluster. The cluster may have many nodes
     * or this JVM could be the only node.
     *
     * @return true if this JVM is part of a cluster.
     */
    public static boolean isClusteringStarted() {
        return CacheFactory.isClusteringStarted();
    }

    /**
     * Returns true if this member is the senior member in the cluster. If clustering
     * is not enabled, this method will also return true. This test is useful for
     * tasks that should only be run on a single member in a cluster.
     *
     * @return true if this cluster member is the senior or if clustering is not enabled.
     */
    public static boolean isSeniorClusterMember() {
        return CacheFactory.isSeniorClusterMember();
    }

    /**
     * Returns basic information about the current members of the cluster or an empty
     * collection if not running in a cluster.
     *
     * @return information about the current members of the cluster or an empty
     * collection if not running in a cluster.
     */
    public static Collection<ClusterNodeInfo> getNodesInfo() {
        return CacheFactory.getClusterNodesInfo();
    }

    /**
     * Returns the maximum number of cluster members allowed. Both values 0 and 1 mean that clustering
     * is not available. However, a value of 1 means that it's a license problem rather than not having
     * the ability to do clustering as defined with value 0.
     *
     * @return the maximum number of cluster members allowed or 0 or 1 if clustering is not allowed.
     */
    public static int getMaxClusterNodes() {
        return CacheFactory.getMaxClusterNodes();
    }

    /**
     * Returns the id of the node that is the senior cluster member. When not in a cluster the returned
     * node id will be the {@link XMPPServer#getNodeID()}.
     *
     * @return the id of the node that is the senior cluster member.
     */
    public static NodeID getSeniorClusterMember() {
        byte[] clusterMemberID = CacheFactory.getSeniorClusterMemberID();
        if (clusterMemberID == null) {
            return XMPPServer.getInstance().getNodeID();
        }
        return NodeID.getInstance(clusterMemberID);
    }

    /**
     * Returns true if the specified node ID belongs to a known cluster node
     * of this cluster.
     *
     * @param nodeID the ID of the node to verify.
     * @return true if the specified node ID belongs to a known cluster node
     * of this cluster.
     */
    public static boolean isClusterMember(byte[] nodeID) {
        for (ClusterNodeInfo nodeInfo : getNodesInfo()) {
            // NOTE(review): getNodeID() is compared with equals() against a raw
            // byte[]. If getNodeID() returns a NodeID (as getSeniorClusterMember()
            // suggests), this comparison can never be true; if it returns a byte[],
            // equals() is reference equality. Confirm the NodeID API and consider
            // comparing against NodeID.getInstance(nodeID) instead.
            if (nodeInfo.getNodeID().equals(nodeID)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Immutable value object placed on the event queue. The processed flag is set
     * by the dispatcher thread once every listener has been notified.
     */
    private static class Event {
        private EventType type;
        private byte[] nodeID;
        private boolean processed;

        public Event(EventType type, byte[] oldNodeID) {
            this.type = type;
            this.nodeID = oldNodeID;
        }

        public EventType getType() {
            return type;
        }

        public byte[] getNodeID() {
            return nodeID;
        }

        public boolean isProcessed() {
            return processed;
        }

        public void setProcessed(boolean processed) {
            this.processed = processed;
        }

        @Override
        public String toString() {
            return super.toString() + " type: " + type;
        }
    }

    /**
     * Represents valid event types.
     */
    private enum EventType {
        /**
         * This JVM joined a cluster.
         */
        joined_cluster,

        /**
         * This JVM is no longer part of the cluster.
         */
        left_cluster,

        /**
         * This JVM is now the senior cluster member.
         */
        marked_senior_cluster_member
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.core.request.mapper;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Locale;
import org.apache.wicket.MockPage;
import org.apache.wicket.core.request.handler.IPageProvider;
import org.apache.wicket.core.request.handler.IPageRequestHandler;
import org.apache.wicket.core.request.handler.ListenerRequestHandler;
import org.apache.wicket.core.request.handler.PageAndComponentProvider;
import org.apache.wicket.core.request.handler.PageProvider;
import org.apache.wicket.core.request.handler.RenderPageRequestHandler;
import org.apache.wicket.request.IRequestHandler;
import org.apache.wicket.request.Request;
import org.apache.wicket.request.Url;
import org.apache.wicket.request.component.IRequestableComponent;
import org.apache.wicket.request.component.IRequestablePage;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@code PageInstanceMapper}: decoding of {@code wicket/page?<id>} style
 * URLs into render/listener request handlers, and encoding handlers back into URLs.
 *
 * @author Matej Knopp
 */
class PageInstanceMapperTest extends AbstractMapperTest
{
	/** Mapper under test, wired to the mock context supplied by the base class. */
	private final PageInstanceMapper encoder = new PageInstanceMapper()
	{
		@Override
		protected IMapperContext getContext()
		{
			return context;
		}
	};

	/**
	 * A bare page id decodes to a render handler for that page.
	 */
	@Test
	void decode1()
	{
		Url url = Url.parse("wicket/page?4");
		IRequestHandler handler = encoder.mapRequest(getRequest(url));
		assertThat(handler).isInstanceOf(RenderPageRequestHandler.class);
		RenderPageRequestHandler h = (RenderPageRequestHandler)handler;
		checkPage(h.getPage(), 4);
	}

	/**
	 * Extra query parameters after the page id are ignored when decoding.
	 */
	@Test
	void decode2()
	{
		Url url = Url.parse("wicket/page?4&a=3&b=3");
		IRequestHandler handler = encoder.mapRequest(getRequest(url));
		assertThat(handler).isInstanceOf(RenderPageRequestHandler.class);
		RenderPageRequestHandler h = (RenderPageRequestHandler)handler;
		checkPage(h.getPage(), 4);
	}

	/**
	 * URLs with path segments after the page identifier are not mapped at all.
	 */
	@Test
	void ignoreIfPageIdentifierHasSegmentsAfterIt()
	{
		Url url = Url.parse("wicket/page/ingore/me?4&a=3&b=3");
		IRequestHandler handler = encoder.mapRequest(getRequest(url));
		assertNull(handler);
	}

	/**
	 * A component path after the page id decodes to a listener handler without a
	 * behavior index.
	 */
	@Test
	void decode3()
	{
		Url url = Url.parse("wicket/page?4--a-b-c");
		IRequestHandler handler = encoder.mapRequest(getRequest(url));
		assertThat(handler).isInstanceOf(ListenerRequestHandler.class);
		ListenerRequestHandler h = (ListenerRequestHandler)handler;
		checkPage(h.getPage(), 4);
		// Expected value first: JUnit's assertEquals(expected, actual) convention,
		// so a failure reports the values the right way around.
		assertEquals("a:b:c", h.getComponent().getPageRelativePath());
		assertNull(h.getBehaviorIndex());
	}

	/**
	 * URLs not starting with the 'wicket/page' segments are not mapped.
	 */
	@Test
	void decode4()
	{
		Url url = Url.parse("wickett/pagee?4--a:b-c");
		IRequestHandler handler = encoder.mapRequest(getRequest(url));
		assertNull(handler);
	}

	/**
	 * A non-numeric page identifier is not mapped.
	 */
	@Test
	void decode5()
	{
		Url url = Url.parse("wicket/page?abc");
		IRequestHandler handler = encoder.mapRequest(getRequest(url));
		assertNull(handler);
	}

	/**
	 * A behavior index in the URL is decoded onto the listener handler.
	 */
	@Test
	void decode6()
	{
		Url url = Url.parse("wicket/page?4-ILinkListener.5-a-b-c");
		IRequestHandler handler = encoder.mapRequest(getRequest(url));
		assertThat(handler).isInstanceOf(ListenerRequestHandler.class);
		ListenerRequestHandler h = (ListenerRequestHandler)handler;
		checkPage(h.getPage(), 4);
		// Expected value first (see decode3).
		assertEquals("a:b:c", h.getComponent().getPageRelativePath());
		assertEquals((Object)5, h.getBehaviorIndex());
	}

	/**
	 * The render count encoded in the URL is carried over to the decoded page.
	 */
	@Test
	void decode7()
	{
		Url url = Url.parse("wicket/page?4-6.5-a-b-c");
		context.setNextPageRenderCount(6);
		IRequestHandler handler = encoder.mapRequest(getRequest(url));
		assertThat(handler).isInstanceOf(ListenerRequestHandler.class);
		ListenerRequestHandler h = (ListenerRequestHandler)handler;
		assertEquals(6, h.getPage().getRenderCount());
	}

	/**
	 * A render count mismatch between URL and page raises a StalePageException.
	 */
	@Test
	void decode8()
	{
		Url url = Url.parse("wicket/page?4-6.5-a-b-c");
		context.setNextPageRenderCount(8);
		IRequestHandler handler = encoder.mapRequest(getRequest(url));
		assertThrows(StalePageException.class, () -> {
			((IPageRequestHandler)handler).getPage();
		});
	}

	/**
	 * Decoding works when the request URL is relative and the client URL carries
	 * the 'wicket/page' prefix.
	 */
	@Test
	void decode9()
	{
		final Url url = Url.parse("page?4");
		Request request = new Request()
		{
			@Override
			public Url getUrl()
			{
				return url;
			}

			@Override
			public Locale getLocale()
			{
				return null;
			}

			@Override
			public Charset getCharset()
			{
				return StandardCharsets.UTF_8;
			}

			@Override
			public Url getClientUrl()
			{
				return Url.parse("wicket/page");
			}

			@Override
			public Object getContainerRequest()
			{
				return null;
			}
		};
		IRequestHandler handler = encoder.mapRequest(request);
		IRequestablePage page = ((IPageRequestHandler)handler).getPage();
		checkPage(page, 4);
	}

	/**
	 * Decoding also works when the client URL has no 'wicket' prefix.
	 */
	@Test
	void decode10()
	{
		final Url url = Url.parse("page?4");
		Request request = new Request()
		{
			@Override
			public Url getUrl()
			{
				return url;
			}

			@Override
			public Locale getLocale()
			{
				return null;
			}

			@Override
			public Charset getCharset()
			{
				return StandardCharsets.UTF_8;
			}

			@Override
			public Url getClientUrl()
			{
				return Url.parse("page");
			}

			@Override
			public Object getContainerRequest()
			{
				return null;
			}
		};
		IRequestHandler handler = encoder.mapRequest(request);
		IRequestablePage page = ((IPageRequestHandler)handler).getPage();
		checkPage(page, 4);
	}

	/**
	 * A render handler encodes to 'wicket/page?&lt;id&gt;'.
	 */
	@Test
	void encode1()
	{
		MockPage page = new MockPage(15);
		IPageProvider provider = new PageProvider(page);
		IRequestHandler handler = new RenderPageRequestHandler(provider);
		Url url = encoder.mapHandler(handler);
		assertEquals("wicket/page?15", url.toString());
	}

	/**
	 * A listener handler encodes page id, render count and component path.
	 */
	@Test
	void encode2()
	{
		MockPage page = new MockPage(15);
		page.setRenderCount(5);
		IRequestableComponent c = page.get("a:b:c");
		PageAndComponentProvider provider = new PageAndComponentProvider(page, c);
		IRequestHandler handler = new ListenerRequestHandler(provider);
		Url url = encoder.mapHandler(handler);
		assertEquals("wicket/page?15-5.-a-b-c", url.toString());
	}
}
| |
/*
Derby - Class org.apache.derbyTesting.functionTests.tests.junitTests.compatibility.CompatibilitySuite
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* <p>
* This is the JUnit suite verifying compatibility of Derby clients and
* servers across Derby version levels and supported VMs. When you want
* to add a new class of tests to this suite, just add the classname to
* the accumulator in suite().
* </p>
*
*/
package org.apache.derbyTesting.functionTests.tests.junitTests.compatibility;
import java.io.*;
import java.sql.*;
import java.util.*;
import junit.framework.*;
import org.apache.derbyTesting.functionTests.util.DerbyJUnitTest;
public class CompatibilitySuite extends DerbyJUnitTest
{
/////////////////////////////////////////////////////////////
//
// CONSTANTS
//
/////////////////////////////////////////////////////////////
// Supported versions of the db2jcc client.
public static final Version IBM_2_4 = new Version( 2, 4 );
// Supported versions of Derby.
public static final Version DRB_10_0 = new Version( 10, 0 );
public static final Version DRB_10_1 = new Version( 10, 1 );
public static final Version DRB_10_2 = new Version( 10, 2 );
// Supported VM versions.
public static final Version VM_1_3 = new Version( 1, 3 );
public static final Version VM_1_4 = new Version( 1, 4 );
public static final Version VM_1_5 = new Version( 1, 5 );
public static final String SERVER_VERSION_FUNCTION = "getVMVersion";
private static final String VERSION_PROPERTY = "java.version";
private static final int EXPECTED_CLIENT_COUNT = 1;
/////////////////////////////////////////////////////////////
//
// STATE
//
/////////////////////////////////////////////////////////////
private static Driver _driver; // the corresponding jdbc driver
private static Version _clientVMLevel; // level of client-side vm
private static Version _serverVMLevel; // level of server vm
private static Version _driverLevel; // client rev level
private static Version _serverLevel; // server rev level
/////////////////////////////////////////////////////////////
//
// JUnit BEHAVIOR
//
/////////////////////////////////////////////////////////////
/**
 * <p>
 * JUnit boilerplate which assembles the compatibility test suite: every public
 * method whose name starts with "test" in the registered classes becomes a test
 * case. To add a new class of tests, wire it into this method.
 * </p>
 */
public static Test suite()
{
    TestSuite allTests = new TestSuite();

    allTests.addTestSuite( JDBCDriverTest.class );

    return allTests;
}
/////////////////////////////////////////////////////////////
//
// ENTRY POINT
//
/////////////////////////////////////////////////////////////
/**
 * <p>
 * Run JDBC compatibility tests using either the specified client or
 * the client that is visible
 * on the classpath. If there is more than one client on the classpath,
 * exits with an error.
 * </p>
 *
 * <ul>
 * <li>arg[ 0 ] = required name of database to connect to</li>
 * <li>arg[ 1 ] = optional driver to use. if not specified, we'll look for a
 *                client on the classpath</li>
 * </ul>
 */
public static void main( String args[] )
    throws Exception
{
    int exitStatus = FAILURE_EXIT;

    // Each step either succeeds (true) or raises; short-circuit evaluation
    // stops the chain at the first failing step.
    boolean ready =
        parseDebug() &&
        parseArgs( args ) &&
        parseVMLevel() &&
        findClient() &&
        findServer();

    if ( ready )
    {
        Test allTests = suite();

        println("CompatibilitySuite.main() will run suite with "
                + allTests.countTestCases() + " testcases.");

        TestResult outcome = junit.textui.TestRunner.run( allTests );

        // Exit status is the total number of problems encountered.
        exitStatus = outcome.errorCount() + outcome.failureCount();
    }

    Runtime.getRuntime().exit( exitStatus );
}
/////////////////////////////////////////////////////////////
//
// PUBLIC BEHAVIOR
//
/////////////////////////////////////////////////////////////
/**
 * <p>
 * Get the version of the server. Populated by findServer(); null before then.
 * </p>
 */
public Version getServerVersion() { return _serverLevel; }

/**
 * <p>
 * Get the version of the client. Populated by findClient(); null before then.
 * </p>
 */
public Version getDriverVersion() { return _driverLevel; }

/**
 * <p>
 * Get the vm level of the server.
 * </p>
 * NOTE(review): _serverVMLevel is not assigned anywhere in the visible code;
 * presumably parseServerVMVersion() fills it in - confirm. Also note this
 * accessor is static while its siblings are instance methods reading the same
 * static state.
 */
public static Version getServerVMVersion() { return _serverVMLevel; }

/**
 * <p>
 * Get the vm level of the client. Populated by parseVMLevel().
 * </p>
 */
public Version getClientVMVersion() { return _clientVMLevel; }
/////////////////////////////////////////////////////////////
//
// DATABASE-SIDE FUNCTIONS
//
/////////////////////////////////////////////////////////////
/**
 * <p>
 * Get the vm level of the server. This method is also registered as the SQL
 * function named by SERVER_VERSION_FUNCTION so that it can be invoked on the
 * server side; on the client it simply reports the local "java.version".
 * </p>
 */
public static String getVMVersion()
{
    return System.getProperty( VERSION_PROPERTY );
}
/////////////////////////////////////////////////////////////
//
// MINIONS
//
/////////////////////////////////////////////////////////////
///////////////////
//
// GENERAL MINIONS
//
///////////////////
//////////////////////////
//
// INITIALIZATION MINIONS
//
//////////////////////////
//
// Initialize client settings based on the client found.
// Returns true if exactly one client was found (or one was forced via the
// command line); otherwise an exception is thrown - this method never
// returns false.
// We allow for the special case when we're running the embedded client
// off the current compiled class tree rather than off product jars.
//
static boolean findClient()
    throws Exception
{
    //
    // The client may have been specified on the command line.
    // In that case, we don't bother looking for a client on
    // the classpath.
    //
    if ( getClientSettings() != null ) { faultInDriver( getClientSettings() ); }
    else
    {
        // NOTE(review): currentClientName is never assigned or read - dead local.
        String currentClientName = null;
        int legalCount = LEGAL_CLIENTS.length;
        int foundCount = 0;

        // Probe every known client; each loadable driver counts as "found".
        for ( int i = 0; i < legalCount; i++ )
        {
            String[] candidate = LEGAL_CLIENTS[ i ];

            if ( faultInDriver( candidate ) )
            {
                setClient( candidate );
                foundCount++;
            }
        }

        // Fail unless exactly one client was present on the classpath.
        if ( foundCount != EXPECTED_CLIENT_COUNT )
        {
            throw new Exception( "Wrong number of drivers: " + foundCount );
        }
    }

    // Now make sure that the JDBC driver is what we expect
    try {
        _driver = DriverManager.getDriver( getClientSettings()[ DATABASE_URL ] );
        _driverLevel = new Version( _driver.getMajorVersion(), _driver.getMinorVersion() );
    }
    catch (SQLException e)
    {
        printStackTrace( e );
        throw new Exception
            ( "Driver doesn't understand expected URL: " + getClientSettings()[ DATABASE_URL ] );
    }

    println
        (
         "Driver " + _driver.getClass().getName() +
         " Version = " + _driverLevel
         );

    return true;
}
//
// Initialize server settings: cache the server's version (from database
// metadata) and the server's VM version (via a server-side SQL function).
// Assumes that you have called findClient().
//
static boolean findServer()
    throws Exception
{
    try {
        Connection conn = getConnection();
        DatabaseMetaData dmd = conn.getMetaData();
        String dbProductVersion = dmd.getDatabaseProductVersion();

        _serverLevel = new Version( dbProductVersion );

        parseServerVMVersion( conn );
    }
    catch (Exception e)
    {
        printStackTrace( e );
        // Wrap so callers see a single failure with context.
        // (Fixed typo: was "Error lookup up server info".)
        throw new Exception( "Error looking up server info: " + e.getMessage() );
    }

    println( "Server Version = " + _serverLevel );

    return true;
}
/**
 * <p>
 * Parse the version of the VM we are running in and cache it in
 * _clientVMLevel. Throws a wrapped Exception if the version string
 * cannot be parsed as "major.minor".
 * </p>
 */
static boolean parseVMLevel()
    throws Exception
{
    final String rawLevel = getVMVersion();

    try
    {
        _clientVMLevel = new Version( rawLevel );
    }
    catch (NumberFormatException nfe)
    {
        throw new Exception( "Badly formatted vm version: " + rawLevel );
    }

    println( "VM Version = " + _clientVMLevel );

    return true;
}
/**
 * <p>
 * Parse the command-line arguments: args[0] is the mandatory database
 * name; the optional args[1] selects a specific client by driver name.
 * Throws if no database name is given or the requested client is not on
 * the classpath.
 * </p>
 */
static boolean parseArgs( String args[] )
    throws Exception
{
    if ( args == null || args.length == 0 )
    {
        throw new Exception( "Missing database name." );
    }

    setDatabaseName( args[ 0 ] );

    if ( args.length > 1 && !"".equals( args[ 1 ] ) )
    {
        final String wantedName = args[ 1 ];

        // Select the first legal client whose driver name matches.
        for ( int idx = 0; idx < LEGAL_CLIENTS.length; idx++ )
        {
            String[] clientSettings = LEGAL_CLIENTS[ idx ];

            if ( wantedName.equals( clientSettings[ DRIVER_NAME ] ) )
            {
                setClient( clientSettings );
                break;
            }
        }

        if ( getClientSettings() == null )
        {
            throw new Exception
                ( "Could not find client " + wantedName + " on the classpath." );
        }
    }

    return true;
}
/**
 * <p>
 * Get the vm level of the server: install a server-side SQL function
 * backed by getVMVersion(), invoke it over the given connection (so it
 * runs inside the server's VM), and cache the result in _serverVMLevel.
 * </p>
 */
static void parseServerVMVersion( Connection conn )
    throws SQLException
{
    // Recreate the probe function from scratch so we never invoke a stale copy.
    dropFunction( conn, SERVER_VERSION_FUNCTION );

    PreparedStatement ps = prepare
        (
            conn,
            "create function " + SERVER_VERSION_FUNCTION + "() returns varchar(50)\n" +
            "parameter style java no sql language java\n" +
            "external name 'org.apache.derbyTesting.functionTests.tests.junitTests.compatibility.CompatibilitySuite.getVMVersion'"
        );

    ps.execute();
    close( ps );

    // Invoke the just-created function and read back the server's VM version.
    ps = prepare
        (
            conn,
            "values " + SERVER_VERSION_FUNCTION + "()"
        );

    ResultSet rs = ps.executeQuery();

    rs.next();

    String rawVersion = rs.getString( 1 );

    close( rs );
    close( ps );

    _serverVMLevel = new Version( rawVersion );

    println( "Server VM Version = " + _serverVMLevel );
}
///////////////
//
// SQL MINIONS
//
///////////////
/////////////////////////////////////////////////////////////
//
// INNER CLASSES
//
/////////////////////////////////////////////////////////////
/**
 * <p>
 * This helper class exposes an entry point for creating an empty database.
 * </p>
 */
public static final class Creator
{
    // NOTE(review): this field shadows the outer class's static JDBC-Driver
    // field of the same name; here it holds the suite instance used to
    // create the database.
    private static CompatibilitySuite _driver = new CompatibilitySuite();

    /**
     * <p>
     * Wait for server to come up, then create the database.
     * </p>
     *
     * <ul>
     * <li>args[ 0 ] = name of database to create.</li>
     * </ul>
     */
    public static void main( String[] args )
        throws Exception
    {
        String databaseName = args[ 0 ];

        // Load and select the JDBC client before touching the database.
        CompatibilitySuite.findClient();

        // NOTE(review): no explicit wait is visible here; presumably
        // createDB() retries until the server is up — TODO confirm.
        _driver.createDB( databaseName );
    }
}
/**
 * <p>
 * A class for storing a major and minor version number. This class
 * assumes that more capable versions compare greater than less capable versions.
 * </p>
 */
public static final class Version implements Comparable
{
    private int _major;
    private int _minor;

    /** Construct from explicit major and minor numbers. */
    public Version( int major, int minor )
    {
        constructorMinion( major, minor );
    }

    /**
     * Construct from a dot-separated string such as "10.3" or "10.3.1.4";
     * only the first two tokens are used.
     *
     * @throws NumberFormatException if either of the first two tokens is not an int
     */
    public Version( String desc )
        throws NumberFormatException
    {
        StringTokenizer tokens = new StringTokenizer( desc, "." );

        constructorMinion
            (
                java.lang.Integer.parseInt( tokens.nextToken() ),
                java.lang.Integer.parseInt( tokens.nextToken() )
            );
    }

    // Shared body of both constructors.
    private void constructorMinion( int major, int minor )
    {
        _major = major;
        _minor = minor;
    }

    /**
     * <p>
     * Returns true if this Version is at least as advanced
     * as that Version.
     * </p>
     */
    public boolean atLeast( Version that )
    {
        return this.compareTo( that ) > -1;
    }

    ////////////////////////////////////////////////////////
    //
    // Comparable BEHAVIOR
    //
    ////////////////////////////////////////////////////////

    /**
     * Order by major, then minor, version number. Note that this returns -1
     * (rather than throwing, as the Comparable contract specifies) when
     * other is null or not a Version.
     */
    public int compareTo( Object other )
    {
        if ( other == null ) { return -1; }
        if ( !( other instanceof Version ) ) { return -1; }

        Version that = (Version) other;

        if ( this._major < that._major ) { return -1; }
        if ( this._major > that._major ) { return 1; }

        // Compare minors the same way as majors. The previous code returned
        // (this._minor - that._minor), whose subtraction can overflow int
        // for extreme values; explicit comparisons are sign-safe.
        if ( this._minor < that._minor ) { return -1; }
        if ( this._minor > that._minor ) { return 1; }

        return 0;
    }

    ////////////////////////////////////////////////////////
    //
    // Object OVERLOADS
    //
    ////////////////////////////////////////////////////////

    /** Renders as "major.minor". */
    public String toString()
    {
        return Integer.toString( _major ) + '.' + Integer.toString( _minor );
    }

    /** Equal iff compareTo() says the versions are the same. */
    public boolean equals( Object other )
    {
        return (compareTo( other ) == 0);
    }

    /** Consistent with equals(): equal versions hash identically. */
    public int hashCode()
    {
        return _major ^ _minor;
    }
}
}
| |
/*
* Copyright (C) 2015-2017 Fabio Ticconi
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.github.fabioticconi.alone.systems;
import com.artemis.ComponentMapper;
import com.artemis.EntityEdit;
import com.artemis.annotations.Wire;
import com.artemis.managers.PlayerManager;
import com.artemis.utils.IntBag;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.fabioticconi.alone.components.*;
import com.github.fabioticconi.alone.components.attributes.*;
import com.github.fabioticconi.alone.constants.Options;
import com.github.fabioticconi.alone.constants.TerrainType;
import net.mostlyoriginal.api.system.core.PassiveSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import rlforj.math.Point;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
/**
* @author Fabio Ticconi
*/
public class CreatureSystem extends PassiveSystem
{
static final Logger log = LoggerFactory.getLogger(CreatureSystem.class);
ComponentMapper<Name> mName;
ComponentMapper<AI> mAI;
ComponentMapper<Position> mPosition;
ComponentMapper<Strength> mStr;
ComponentMapper<Agility> mAgi;
ComponentMapper<Constitution> mCon;
ComponentMapper<Herbivore> mHerbivore;
ComponentMapper<Carnivore> mCarnivore;
GroupSystem sGroup;
MapSystem sMap;
ItemSystem sItems;
MapSystem map;
PlayerManager pManager;
@Wire
Random r;
@Wire
ObjectMapper mapper;
HashMap<String, CreatureTemplate> templates;
boolean loaded = false;
@Override
protected void initialize()
{
try
{
loadTemplates();
} catch (final IOException e)
{
e.printStackTrace();
}
}
public HashMap<String, CreatureTemplate> getTemplates()
{
try
{
loadTemplates();
} catch (final IOException e)
{
e.printStackTrace();
}
return templates;
}
public void loadTemplates() throws IOException
{
final InputStream fileStream = new FileInputStream("data/creatures.yml");
templates = mapper.readValue(fileStream, new TypeReference<HashMap<String, CreatureTemplate>>()
{
});
for (final Map.Entry<String, CreatureTemplate> entry : templates.entrySet())
{
final CreatureTemplate temp = entry.getValue();
temp.tag = entry.getKey();
}
}
public static class CreatureTemplate
{
public String name;
public String tag;
public Strength strength;
public Agility agility;
public Constitution constitution;
public Herbivore herbivore;
public Carnivore carnivore;
public Skin skin;
public Sight sight;
public AI ai;
public Group group;
public Sprite sprite;
public Player player;
public Inventory inventory;
public Underwater underwater;
}
public void reset()
{
loaded = false;
placeObjects();
}
/**
* It instantiates an object of the given type and places at that Point.
*
* @param tag
* @param p
* @return
*/
public int makeCreature(final String tag, final Point p)
{
return makeCreature(tag, p.x, p.y);
}
/**
* It instantiates an object of the given type and places at that position.
*
* @param tag
* @param x
* @param y
* @return
*/
public int makeCreature(final String tag, final int x, final int y)
{
final int id = makeCreature(tag);
if (id < 0)
return id;
final Point p = map.getFirstTotallyFree(x, y, -1);
mPosition.create(id).set(p.x, p.y);
map.obstacles.set(id, p.x, p.y);
return id;
}
public int makeCreature(final String tag)
{
final CreatureTemplate template = templates.get(tag);
if (template == null)
{
log.warn("Creature named {} doesn't exist", tag);
return -1;
}
final int id = world.create();
final EntityEdit edit = world.edit(id);
edit.add(new Name(template.name, tag));
if (template.strength != null)
edit.add(template.strength);
if (template.agility != null)
edit.add(template.agility);
if (template.constitution != null)
edit.add(template.constitution);
if (template.skin != null)
edit.add(template.skin);
if (template.sight != null)
edit.add(template.sight);
if (template.herbivore != null)
edit.add(template.herbivore);
if (template.carnivore != null)
edit.add(template.carnivore);
if (template.ai != null)
{
template.ai.cooldown = r.nextFloat() * AISystem.BASE_TICKTIME + 1.0f;
edit.add(template.ai);
}
if (template.group != null)
edit.add(template.group);
if (template.sprite != null)
edit.add(template.sprite);
if (template.player != null)
edit.add(template.player);
if (template.inventory != null)
edit.add(template.inventory);
if (template.underwater != null)
edit.add(template.underwater);
edit.create(Alertness.class).value = 0.0f;
makeDerivative(id);
return id;
}
public void placeObjects()
{
if (loaded)
return;
loaded = true;
int x;
int y;
// add player
x = Options.MAP_SIZE_X / 2;
y = Options.MAP_SIZE_Y / 2;
int id = makeCreature("player", x, y);
pManager.setPlayer(world.getEntity(id), "player");
world.edit(id).add(new Name("You", "you"));
// add a herd of buffalos
int groupId = sGroup.createGroup();
IntBag group = sGroup.getGroup(groupId);
for (int i = 0; i < 4; i++)
{
x = (Options.MAP_SIZE_X / 2) + r.nextInt(12) - 6;
y = (Options.MAP_SIZE_Y / 2) + r.nextInt(12) - 6;
id = makeCreature("buffalo", x, y);
world.edit(id).create(Group.class).groupId = groupId;
group.add(id);
}
// add small, independent rabbits/hares
for (int i = 0; i < 7; i++)
{
x = (Options.MAP_SIZE_X / 2) + r.nextInt(12) - 6;
y = (Options.MAP_SIZE_Y / 2) + r.nextInt(12) - 6;
makeCreature("rabbit", x, y);
}
// add a pack of wolves
groupId = sGroup.createGroup();
group = sGroup.getGroup(groupId);
for (int i = 0; i < 5; i++)
{
x = (Options.MAP_SIZE_X / 2) + r.nextInt(12) - 6;
y = (Options.MAP_SIZE_Y / 2) + r.nextInt(12) - 6;
id = makeCreature("wolf", x, y);
world.edit(id).create(Group.class).groupId = groupId;
group.add(id);
}
// add solitary pumas
for (int i = 0; i < 5; i++)
{
x = (Options.MAP_SIZE_X / 2) + r.nextInt(12) - 6;
y = (Options.MAP_SIZE_Y / 2) + r.nextInt(12) - 6;
makeCreature("puma", x, y);
}
// add fish in the sea
for (x = 0; x < Options.MAP_SIZE_X; x++)
{
for (y = 0; y < Options.MAP_SIZE_Y; y++)
{
final MapSystem.Cell cell = sMap.get(x, y);
if (cell.type.equals(TerrainType.WATER) && r.nextGaussian() > 5f)
{
if (!map.obstacles.isEmpty(x, y))
continue;
makeCreature("fish", x, y);
}
}
}
// add random trees
for (x = 0; x < Options.MAP_SIZE_X; x++)
{
for (y = 0; y < Options.MAP_SIZE_Y; y++)
{
final MapSystem.Cell cell = sMap.get(x, y);
if ((cell.type.equals(TerrainType.GRASS) && r.nextGaussian() > 2.5f) ||
(cell.type.equals(TerrainType.LAND) && r.nextGaussian() > 3f))
{
if (!map.obstacles.isEmpty(x, y))
continue;
// 1% of the trees are fallen remains
if (r.nextFloat() < 0.1f)
{
sItems.makeItem("trunk", x, y);
if (r.nextBoolean())
sItems.makeItem("branch", x, y);
if (r.nextBoolean())
sItems.makeItem("vine", x, y);
}
else
{
sItems.makeItem("tree", x, y);
}
}
}
}
// add random boulders
for (x = 0; x < Options.MAP_SIZE_X; x++)
{
for (y = 0; y < Options.MAP_SIZE_Y; y++)
{
final MapSystem.Cell cell = sMap.get(x, y);
if ((cell.type.equals(TerrainType.GRASS) && r.nextGaussian() > 3.5f) ||
(cell.type.equals(TerrainType.LAND) && r.nextGaussian() > 3f))
{
if (!map.obstacles.isEmpty(x, y))
continue;
sItems.makeItem("boulder", x, y);
}
}
}
// add random stones
for (x = 0; x < Options.MAP_SIZE_X; x++)
{
for (y = 0; y < Options.MAP_SIZE_Y; y++)
{
final MapSystem.Cell cell = sMap.get(x, y);
if ((cell.type.equals(TerrainType.GRASS) && r.nextGaussian() > 3f) ||
(cell.type.equals(TerrainType.LAND) && r.nextGaussian() > 2.5f))
{
if (!map.items.isEmpty(x, y))
continue;
sItems.makeItem("stone", x, y);
}
}
}
log.info("initialised");
}
public void makeDerivative(final int id)
{
final Strength str = mStr.get(id);
final Agility agi = mAgi.get(id);
final Constitution con = mCon.get(id);
final EntityEdit edit = world.edit(id);
// Secondary Attributes
final int size = Math.round((con.value - agi.value) / 2f);
edit.create(Size.class).set(size);
edit.create(Stamina.class).set((5 + str.value + con.value) * 100); // FIXME for debug, reduce/tweak later
edit.create(Speed.class).set((con.value - str.value - agi.value + 6) / 12f);
edit.create(Health.class).set((con.value + 3) * 10);
// Tertiary Attributes
// a fish does not need to eat, for now
if (mHerbivore.has(id) || mCarnivore.has(id))
edit.create(Hunger.class).set(0f, (size / 2f) + 2f);
}
}
| |
/*
* User: anna
* Date: 18-Aug-2009
*/
package com.intellij.refactoring.typeMigration.rules;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Comparing;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.impl.PsiDiamondTypeUtil;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.refactoring.typeMigration.TypeConversionDescriptor;
import com.intellij.refactoring.typeMigration.TypeConversionDescriptorBase;
import com.intellij.refactoring.typeMigration.TypeEvaluator;
import com.intellij.refactoring.typeMigration.TypeMigrationLabeler;
import com.siyeh.ig.psiutils.ParenthesesUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.concurrent.atomic.*;
public class AtomicConversionRule extends TypeConversionRule {
private static final Logger LOG = Logger.getInstance(AtomicConversionRule.class);
/**
 * Entry point of the rule: decides whether this migration is a supported
 * plain-type &lt;-&gt; atomic-wrapper conversion and, if so, returns the
 * matching descriptor; otherwise returns null.
 */
@Override
public TypeConversionDescriptorBase findConversion(PsiType from,
                                                   PsiType to,
                                                   PsiMember member,
                                                   PsiExpression context,
                                                   TypeMigrationLabeler labeler) {
  // Migrating a plain type INTO an atomic wrapper?
  final boolean toAtomic = to instanceof PsiClassType && isAtomicTypeMigration(from, (PsiClassType)to, context);
  if (toAtomic) {
    return findDirectConversion(context, to, from);
  }
  // Migrating OUT of an atomic wrapper back to a plain type?
  final boolean fromAtomic = from instanceof PsiClassType && isAtomicTypeMigration(to, (PsiClassType)from, context);
  if (fromAtomic) {
    return findReverseConversion(context);
  }
  return null;
}
/**
 * Returns true when {@code from -> to} is one of the recognized
 * plain-type to atomic-wrapper migrations: int/long/boolean to
 * AtomicInteger/AtomicLong/AtomicBoolean, int[]/long[] to
 * AtomicIntegerArray/AtomicLongArray, or any type convertible/assignable to
 * the type parameter of an AtomicReference/AtomicReferenceArray.
 */
private static boolean isAtomicTypeMigration(PsiType from, PsiClassType to, PsiExpression context) {
  // Scalar primitives and primitive arrays to their dedicated atomic wrappers.
  if (PsiType.INT.equals(from) && to.getCanonicalText().equals(AtomicInteger.class.getName())) {
    return true;
  }
  if (from.equals(PsiType.INT.createArrayType()) && to.getCanonicalText().equals(AtomicIntegerArray.class.getName())) {
    return true;
  }
  if (PsiType.LONG.equals(from) && to.getCanonicalText().equals(AtomicLong.class.getName())) {
    return true;
  }
  if (from.equals(PsiType.LONG.createArrayType()) && to.getCanonicalText().equals(AtomicLongArray.class.getName())) {
    return true;
  }
  if (PsiType.BOOLEAN.equals(from) && to.getCanonicalText().equals(AtomicBoolean.class.getName())) {
    return true;
  }
  // Everything else only qualifies for AtomicReference / AtomicReferenceArray.
  final PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(to);
  final PsiClass atomicClass = resolveResult.getElement();
  if (atomicClass != null) {
    final String typeQualifiedName = atomicClass.getQualifiedName();
    if (!Comparing.strEqual(typeQualifiedName, AtomicReference.class.getName()) &&
        !Comparing.strEqual(typeQualifiedName, AtomicReferenceArray.class.getName())) {
      return false;
    }
    final PsiTypeParameter[] typeParameters = atomicClass.getTypeParameters();
    if (typeParameters.length != 1) return false;
    // The element type of the source must fit the atomic's type parameter:
    // via unboxing conversion for primitives, via assignability otherwise.
    final PsiType toTypeParameterValue = resolveResult.getSubstitutor().substitute(typeParameters[0]);
    if (toTypeParameterValue != null) {
      if (from.getDeepComponentType() instanceof PsiPrimitiveType) {
        final PsiPrimitiveType unboxedInitialType = PsiPrimitiveType.getUnboxedType(toTypeParameterValue);
        if (unboxedInitialType != null) {
          return TypeConversionUtil.areTypesConvertible(from.getDeepComponentType(), unboxedInitialType);
        }
      }
      else {
        return TypeConversionUtil.isAssignable(from.getDeepComponentType(), PsiUtil.captureToplevelWildcards(toTypeParameterValue, context));
      }
    }
  }
  return false;
}
/**
 * Builds the replacement template for migrating an expression from plain
 * type {@code from} to atomic type {@code to}: ++/--/+=/-= on scalars and
 * arrays map onto the corresponding atomic update methods; anything not
 * matched here falls through to the AtomicReference(Array) handlers.
 */
@Nullable
public static TypeConversionDescriptor findDirectConversion(PsiElement context, PsiType to, PsiType from) {
  final PsiClass toTypeClass = PsiUtil.resolveClassInType(to);
  LOG.assertTrue(toTypeClass != null);
  final String qualifiedName = toTypeClass.getQualifiedName();
  if (qualifiedName != null) {
    // AtomicInteger / AtomicLong: rewrite increment/decrement/compound-assign.
    if (qualifiedName.equals(AtomicInteger.class.getName()) || qualifiedName.equals(AtomicLong.class.getName())) {
      if (context instanceof PsiPostfixExpression) {
        final IElementType operationSign = ((PsiPostfixExpression)context).getOperationTokenType();
        if (operationSign == JavaTokenType.MINUSMINUS) {
          return new TypeConversionDescriptor("$qualifier$--", "$qualifier$.getAndDecrement()");
        }
        if (operationSign == JavaTokenType.PLUSPLUS) {
          return new TypeConversionDescriptor("$qualifier$++", "$qualifier$.getAndIncrement()");
        }
      }
      else if (context instanceof PsiPrefixExpression) {
        final IElementType operationSign = ((PsiPrefixExpression)context).getOperationTokenType();
        if (operationSign == JavaTokenType.MINUSMINUS) {
          return new TypeConversionDescriptor("--$qualifier$", "$qualifier$.decrementAndGet()");
        }
        if (operationSign == JavaTokenType.PLUSPLUS) {
          return new TypeConversionDescriptor("++$qualifier$", "$qualifier$.incrementAndGet()");
        }
      }
      else if (context instanceof PsiAssignmentExpression) {
        final PsiJavaToken signToken = ((PsiAssignmentExpression)context).getOperationSign();
        final IElementType operationSign = signToken.getTokenType();
        final String sign = signToken.getText();
        // += / -= become addAndGet(...); -= negates the argument.
        if (operationSign == JavaTokenType.PLUSEQ || operationSign == JavaTokenType.MINUSEQ) {
          return new TypeConversionDescriptor("$qualifier$ " + sign + " $val$",
                                              "$qualifier$.addAndGet(" + (operationSign == JavaTokenType.MINUSEQ ? "-($val$))" : "$val$)")) {
            @Override
            public PsiExpression replace(PsiExpression expression, @NotNull TypeEvaluator evaluator) {
              final PsiMethodCallExpression result = (PsiMethodCallExpression)super.replace(expression, evaluator);
              // Strip redundant parentheses introduced by the "-($val$)" form.
              final PsiExpression argument = result.getArgumentList().getExpressions()[0];
              if (argument instanceof PsiPrefixExpression) {
                final PsiExpression operand = ((PsiPrefixExpression)argument).getOperand();
                final PsiExpression striped = ParenthesesUtils.stripParentheses(operand);
                if (striped != null && operand != striped) {
                  operand.replace(striped);
                }
              }
              return result;
            }
          };
        }
      }
      // Plain literal initializer (not the target of an assignment):
      // wrap it in "new AtomicXxx(literal)".
      else if (context instanceof PsiLiteralExpression && !(context.getParent() instanceof PsiAssignmentExpression)) {
        return wrapWithNewExpression(to, from, (PsiExpression)context, context);
      }
    }
    // AtomicIntegerArray / AtomicLongArray: same rewrites, but the matched
    // expression is the PARENT of the array-access context.
    else if (qualifiedName.equals(AtomicIntegerArray.class.getName()) || qualifiedName.equals(AtomicLongArray.class.getName())) {
      PsiElement parentExpression = context.getParent();
      if (parentExpression instanceof PsiPostfixExpression) {
        final IElementType operationSign = ((PsiPostfixExpression)parentExpression).getOperationTokenType();
        if (operationSign == JavaTokenType.MINUSMINUS) {
          return new TypeConversionDescriptor("$qualifier$[$idx$]--", "$qualifier$.getAndDecrement($idx$)",
                                              (PsiExpression)parentExpression);
        }
        if (operationSign == JavaTokenType.PLUSPLUS) {
          return new TypeConversionDescriptor("$qualifier$[$idx$]++", "$qualifier$.getAndIncrement($idx$)",
                                              (PsiExpression)parentExpression);
        }
      }
      else if (parentExpression instanceof PsiPrefixExpression) {
        final IElementType operationSign = ((PsiPrefixExpression)parentExpression).getOperationTokenType();
        if (operationSign == JavaTokenType.MINUSMINUS) {
          return new TypeConversionDescriptor("--$qualifier$[$idx$]", "$qualifier$.decrementAndGet($idx$)",
                                              (PsiExpression)parentExpression);
        }
        if (operationSign == JavaTokenType.PLUSPLUS) {
          return new TypeConversionDescriptor("++$qualifier$[$idx$]", "$qualifier$.incrementAndGet($idx$)",
                                              (PsiExpression)parentExpression);
        }
      }
      else if (parentExpression instanceof PsiAssignmentExpression) {
        final PsiJavaToken signToken = ((PsiAssignmentExpression)parentExpression).getOperationSign();
        final IElementType operationSign = signToken.getTokenType();
        final String sign = signToken.getText();
        if (operationSign == JavaTokenType.PLUSEQ || operationSign == JavaTokenType.MINUSEQ) {
          return new TypeConversionDescriptor("$qualifier$[$idx$] " + sign + " $val$", "$qualifier$.getAndAdd($idx$, " +
                                                                                      (operationSign == JavaTokenType.MINUSEQ
                                                                                       ? "-"
                                                                                       : "") +
                                                                                      "($val$))", (PsiExpression)parentExpression);
        }
      }
    }
  }
  // Fallback: AtomicReferenceArray for array sources, AtomicReference otherwise.
  return from instanceof PsiArrayType
         ? findDirectConversionForAtomicReferenceArray(context, to, from)
         : findDirectConversionForAtomicReference(context, to, from);
}
/**
 * Conversion for a non-array value migrating to AtomicReference (or to a
 * scalar atomic not handled earlier): reads become {@code get()}, writes
 * become {@code set(...)}/{@code updateAndGet(...)}, initializers become
 * {@code new AtomicXxx(...)}.
 */
@Nullable
private static TypeConversionDescriptor findDirectConversionForAtomicReference(PsiElement context, PsiType to, PsiType from) {
  final PsiElement parent = context.getParent();
  // Target of a simple assignment: "x = v" -> "x.set(v)".
  if (parent instanceof PsiAssignmentExpression) {
    final IElementType operationSign = ((PsiAssignmentExpression)parent).getOperationTokenType();
    if (operationSign == JavaTokenType.EQ) {
      return new TypeConversionDescriptor("$qualifier$ = $val$", "$qualifier$.set($val$)", (PsiAssignmentExpression)parent);
    }
  }

  // Plain read: "x" -> "x.get()". For a method call "x.m()", rewrite the
  // qualifier rather than the whole reference.
  if (context instanceof PsiReferenceExpression) {
    final PsiExpression qualifierExpression = ((PsiReferenceExpression)context).getQualifierExpression();
    final PsiExpression expression = context.getParent() instanceof PsiMethodCallExpression && qualifierExpression != null
                                     ? qualifierExpression
                                     : (PsiExpression)context;
    return new TypeConversionDescriptor("$qualifier$", "$qualifier$.get()", expression);
  }
  else if (context instanceof PsiAssignmentExpression) {
    final PsiJavaToken signToken = ((PsiAssignmentExpression)context).getOperationSign();
    final IElementType operationSign = signToken.getTokenType();
    final String sign = signToken.getText();
    if (parent instanceof PsiExpressionStatement) {
      if (operationSign == JavaTokenType.EQ) {
        // Assigning to a FINAL variable: the value must be wrapped in a
        // constructor call instead of set(), since set() can't initialize it.
        final PsiExpression lExpression = ((PsiAssignmentExpression)context).getLExpression();
        if (lExpression instanceof PsiReferenceExpression) {
          final PsiElement element = ((PsiReferenceExpression)lExpression).resolve();
          if (element instanceof PsiVariable && ((PsiVariable)element).hasModifierProperty(PsiModifier.FINAL)) {
            return wrapWithNewExpression(to, from, ((PsiAssignmentExpression)context).getRExpression(), element);
          }
        }
        return new TypeConversionDescriptor("$qualifier$ = $val$", "$qualifier$.set($val$)");
      }
      else {
        // Compound assignment ("x op= v"). On Java 8+ use a lambda with
        // updateAndGet(); note the ')' closing updateAndGet is carried
        // inside the string passed to getBoxedWrapper().
        if (PsiUtil.isLanguageLevel8OrHigher(context)) {
          final String name =
            JavaCodeStyleManager.getInstance(context.getProject()).suggestUniqueVariableName("v", context, false);
          return new TypeConversionDescriptor("$qualifier$" + sign + "$val$", "$qualifier$.updateAndGet("
                                                                              + name + " -> " + getBoxedWrapper(from, to, name + " " + sign.charAt(0) + " $val$)"));
        }
        else {
          // Pre-8: non-atomic read-modify-write via get()/set(); only safe
          // as a standalone statement.
          if (context.getParent() instanceof PsiStatement) {
            return new TypeConversionDescriptor("$qualifier$" + sign + "$val$", "$qualifier$.set(" +
                                                                               getBoxedWrapper(from, to, "$qualifier$.get() " +
                                                                                                         sign.charAt(0) +
                                                                                                         " $val$") +
                                                                               ")");
          } else {
            return null;
          }
        }
      }
    } //else should be a conflict
  }
  // Postfix "x++"/"x--" -> getAndSet with a computed value (not atomic).
  else if (context instanceof PsiPostfixExpression) {
    final String sign = ((PsiPostfixExpression)context).getOperationSign().getText();
    return new TypeConversionDescriptor("$qualifier$" + sign, "$qualifier$.getAndSet(" +
                                                              getBoxedWrapper(from, to, "$qualifier$.get() " + sign.charAt(0) + " 1") +
                                                              ")");
  }
  else if (context instanceof PsiPrefixExpression) {
    final PsiJavaToken operationSign = ((PsiPrefixExpression)context).getOperationSign();
    // Boolean negation survives as "!x.get()".
    if (operationSign.getTokenType() == JavaTokenType.EXCL) {
      return new TypeConversionDescriptor("!$qualifier$", "!$qualifier$.get()");
    }
    final String sign = operationSign.getText();
    return new TypeConversionDescriptor(sign + "$qualifier$", "$qualifier$.set(" + //todo reject?
                                                              getBoxedWrapper(from, to, "$qualifier$.get() " + sign.charAt(0) + " 1") +
                                                              ")");
  }
  // Variable initializer: wrap in "new AtomicXxx(...)".
  if (parent instanceof PsiVariable) {
    return wrapWithNewExpression(to, from, null, parent);
  }
  return null;
}
/**
 * Produces a descriptor that wraps an expression in a constructor call of the
 * target atomic type ("$val$" -> "new AtomicXxx($val$)"). When the atomic's
 * type parameter unboxes to a primitive that the source type is not directly
 * assignable to, an explicit primitive cast is inserted.
 */
public static TypeConversionDescriptor wrapWithNewExpression(PsiType to, PsiType from, @Nullable PsiExpression expression, PsiElement context) {
  // Use diamond/collapsed form where the language level allows it.
  final String typeText = PsiDiamondTypeUtil.getCollapsedType(to, context);
  final PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(to);
  final PsiClass atomicClass = resolveResult.getElement();
  LOG.assertTrue(atomicClass != null);
  final PsiTypeParameter[] typeParameters = atomicClass.getTypeParameters();
  if (typeParameters.length == 1) {
    final PsiType initial = resolveResult.getSubstitutor().substitute(typeParameters[0]);
    final PsiPrimitiveType unboxedInitialType = PsiPrimitiveType.getUnboxedType(initial);
    if (unboxedInitialType != null) {
      if (from instanceof PsiPrimitiveType) {
        final PsiClassType boxedFromType = ((PsiPrimitiveType)from).getBoxedType(atomicClass);
        LOG.assertTrue(boxedFromType != null);
        // e.g. int -> AtomicReference<Long>: cast the value to long first.
        if (!TypeConversionUtil.isAssignable(initial, boxedFromType)) {
          return new TypeConversionDescriptor("$val$", "new " + typeText + "((" + unboxedInitialType.getCanonicalText() + ")$val$)", expression);
        }
      }
    }
  }
  return new TypeConversionDescriptor("$val$", "new " + typeText + "($val$)", expression);
}
/**
 * Conversion for an array value migrating to AtomicReferenceArray: element
 * reads become {@code get(idx)}, element writes become {@code set(idx, ...)}
 * (or {@code getAndSet}), and initializers are wrapped in a constructor call.
 * Works with the COMPONENT type of {@code from}.
 */
@Nullable
private static TypeConversionDescriptor findDirectConversionForAtomicReferenceArray(PsiElement context, PsiType to, PsiType from) {
  LOG.assertTrue(from instanceof PsiArrayType);
  from = ((PsiArrayType)from).getComponentType();
  final PsiElement parent = context.getParent();
  final PsiElement parentParent = parent.getParent();

  if (parent instanceof PsiAssignmentExpression) {
    final PsiAssignmentExpression assignmentExpression = (PsiAssignmentExpression)parent;
    final IElementType operationSign = assignmentExpression.getOperationTokenType();
    final String sign = assignmentExpression.getOperationSign().getText();
    if (context instanceof PsiArrayAccessExpression) {
      if (parentParent instanceof PsiExpressionStatement) {
        // Element on the LEFT of an assignment statement.
        if (assignmentExpression.getLExpression() == context) {
          if (operationSign == JavaTokenType.EQ) {
            return new TypeConversionDescriptor("$qualifier$[$idx$] = $val$", "$qualifier$.set($idx$, $val$)", assignmentExpression);
          }
          else {
            // Compound assignment: non-atomic read-modify-write via get()/set().
            return new TypeConversionDescriptor("$qualifier$[$idx$]" + sign + "$val$",
                                                "$qualifier$.set($idx$, " + getBoxedWrapper(from, to, "$qualifier$.get($idx$) " + sign.charAt(0) + " $val$") + ")",
                                                assignmentExpression);
          }
        }
      } //else should be a conflict
    }
    else {
      // Whole-array assignment, e.g. "array = new T[l]": wrap the RHS.
      final PsiExpression rExpression = assignmentExpression.getRExpression();
      if (rExpression == context && operationSign == JavaTokenType.EQ) { //array = new T[l];
        return wrapWithNewExpression(to, from, rExpression, context);
      }
    }
  } else if (parent instanceof PsiVariable) {
    // Array used as a variable initializer.
    if (((PsiVariable)parent).getInitializer() == context) {
      return wrapWithNewExpression(to, from, (PsiExpression)context, context);
    }
  }

  // Increment/decrement/binary updates of an element, as statements.
  if (parentParent instanceof PsiExpressionStatement) {
    if (parent instanceof PsiPostfixExpression) {
      final String sign = ((PsiPostfixExpression)parent).getOperationSign().getText();
      return new TypeConversionDescriptor("$qualifier$[$idx$]" + sign, "$qualifier$.getAndSet($idx$, " +
                                                                       getBoxedWrapper(from, to,
                                                                                       "$qualifier$.get($idx$) " + sign.charAt(0) + " 1") +
                                                                       ")", (PsiExpression)parent);
    }
    else if (parent instanceof PsiPrefixExpression) {
      final String sign = ((PsiPrefixExpression)parent).getOperationSign().getText();
      return new TypeConversionDescriptor(sign + "$qualifier$[$idx$]", "$qualifier$.set($idx$, " +
                                                                       getBoxedWrapper(from, to,
                                                                                       "$qualifier$.get($idx$) " + sign.charAt(0) + " 1") +
                                                                       ")", (PsiExpression)parent);
    }
    else if (parent instanceof PsiBinaryExpression) {
      final String sign = ((PsiBinaryExpression)parent).getOperationSign().getText();
      return new TypeConversionDescriptor("$qualifier$[$idx$]" + sign + "$val$", "$qualifier$.set($idx$, " +
                                                                                 getBoxedWrapper(from, to, "$qualifier$.get($idx$) " +
                                                                                                           sign +
                                                                                                           " $val$)") +
                                                                                 ")", (PsiExpression)parent);
    }
  }

  // Plain element read: "a[i]" -> "a.get(i)".
  if (context instanceof PsiArrayAccessExpression) {
    return new TypeConversionDescriptor("$qualifier$[$idx$]", "$qualifier$.get($idx$)", (PsiExpression)context);
  }
  return null;
}
/**
 * When {@code from} is primitive and the atomic target's type parameter
 * unboxes to a primitive, wraps {@code arg} in an explicit boxing
 * constructor with a primitive cast, e.g.
 * {@code "new Long((long)(arg))"}; otherwise returns {@code arg} unchanged.
 */
private static String getBoxedWrapper(final PsiType from, final PsiType to, @NotNull String arg) {
  final PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(to);
  final PsiClass atomicClass = resolveResult.getElement();
  LOG.assertTrue(atomicClass != null);
  final PsiTypeParameter[] typeParameters = atomicClass.getTypeParameters();
  if (typeParameters.length == 1) {
    final PsiSubstitutor substitutor = resolveResult.getSubstitutor();
    LOG.assertTrue(substitutor.isValid());
    final PsiType initial = substitutor.substitute(typeParameters[0]);
    final PsiPrimitiveType unboxedInitialType = PsiPrimitiveType.getUnboxedType(initial);
    if (unboxedInitialType != null) {
      if (from instanceof PsiPrimitiveType) {
        // boxedFromType is computed only for this sanity assertion; the
        // generated text uses the target's type parameter, not the source.
        final PsiClassType boxedFromType = ((PsiPrimitiveType)from).getBoxedType(atomicClass);
        LOG.assertTrue(boxedFromType != null);
        return "new " + initial.getPresentableText() + "((" + unboxedInitialType.getCanonicalText() + ")(" + arg + "))";
      }
    }
  }
  return arg;
}
/**
 * Conversion for migrating an atomic wrapper back to its plain type:
 * dispatches on the shape of the usage site. Constructor calls collapse to
 * their argument; get/set-style method calls are delegated to
 * findReverseConversionForMethodCall(); anything else yields null.
 */
@Nullable
private static TypeConversionDescriptor findReverseConversion(PsiElement context) {
  if (context instanceof PsiReferenceExpression) {
    // Only references that are the callee of a method call are convertible.
    return context.getParent() instanceof PsiMethodCallExpression
           ? findReverseConversionForMethodCall(context)
           : null;
  }
  if (context instanceof PsiNewExpression) {
    // "new AtomicXxx(v)" collapses to just "v".
    return new TypeConversionDescriptor("new $type$($qualifier$)", "$qualifier$");
  }
  if (context instanceof PsiMethodCallExpression) {
    return findReverseConversionForMethodCall(((PsiMethodCallExpression)context).getMethodExpression());
  }
  return null;
}
/**
 * Maps a call to a recognized atomic accessor/mutator back to the equivalent plain
 * field or array expression (e.g. {@code v.get()} -> {@code v},
 * {@code v.incrementAndGet()} -> {@code ++v}). For each method, the overload with an extra
 * leading index parameter corresponds to the atomic-array variant and maps to an
 * array-access expression. Returns null when the call does not resolve to one of the
 * recognized methods.
 */
@Nullable
private static TypeConversionDescriptor findReverseConversionForMethodCall(PsiElement context) {
  final PsiElement resolved = ((PsiReferenceExpression)context).resolve();
  if (resolved instanceof PsiMethod) {
    final PsiMethod method = (PsiMethod)resolved;
    // Parameter count distinguishes the scalar overload from the indexed (array) overload.
    final int parametersCount = method.getParameterList().getParametersCount();
    final String resolvedName = method.getName();
    if (Comparing.strEqual(resolvedName, "get")) {
      return parametersCount == 0 ?
             new TypeConversionDescriptor("$qualifier$.get()", "$qualifier$") :
             new TypeConversionDescriptor("$qualifier$.get($idx$)", "$qualifier$[$idx$]");
    }
    else if (Comparing.strEqual(resolvedName, "set")) {
      return parametersCount == 1 ?
             new TypeConversionDescriptor("$qualifier$.set($val$)", "$qualifier$ = $val$") :
             new TypeConversionDescriptor("$qualifier$.set($idx$, $val$)", "$qualifier$[$idx$] = $val$");
    }
    else if (Comparing.strEqual(resolvedName, "addAndGet")) {
      return parametersCount == 1 ?
             new TypeConversionDescriptor("$qualifier$.addAndGet($delta$)", "$qualifier$ + $delta$") :
             new TypeConversionDescriptor("$qualifier$.addAndGet($idx$, $delta$)", "$qualifier$[$idx$] + $delta$");
    }
    else if (Comparing.strEqual(resolvedName, "incrementAndGet")) {
      return parametersCount == 0 ?
             new TypeConversionDescriptor("$qualifier$.incrementAndGet()", "++$qualifier$") :
             new TypeConversionDescriptor("$qualifier$.incrementAndGet($idx$)", "++$qualifier$[$idx$]");
    }
    else if (Comparing.strEqual(resolvedName, "decrementAndGet")) {
      return parametersCount == 0 ?
             new TypeConversionDescriptor("$qualifier$.decrementAndGet()", "--$qualifier$") :
             new TypeConversionDescriptor("$qualifier$.decrementAndGet($idx$)", "--$qualifier$[$idx$]");
    }
    else if (Comparing.strEqual(resolvedName, "getAndIncrement")) {
      return parametersCount == 0 ?
             new TypeConversionDescriptor("$qualifier$.getAndIncrement()", "$qualifier$++") :
             new TypeConversionDescriptor("$qualifier$.getAndIncrement($idx$)", "$qualifier$[$idx$]++");
    }
    else if (Comparing.strEqual(resolvedName, "getAndDecrement")) {
      return parametersCount == 0 ?
             new TypeConversionDescriptor("$qualifier$.getAndDecrement()", "$qualifier$--") :
             new TypeConversionDescriptor("$qualifier$.getAndDecrement($idx$)", "$qualifier$[$idx$]--");
    }
    else if (Comparing.strEqual(resolvedName, "getAndAdd")) {
      return parametersCount == 1?
             new TypeConversionDescriptor("$qualifier$.getAndAdd($val$)", "$qualifier$ += $val$") :
             new TypeConversionDescriptor("$qualifier$.getAndAdd($idx$, $val$)", "$qualifier$[$idx$] += $val$");
    }
    else if (Comparing.strEqual(resolvedName, "getAndSet")) {
      return parametersCount == 1 ?
             new TypeConversionDescriptor("$qualifier$.getAndSet($val$)", "$qualifier$ = $val$") :
             new TypeConversionDescriptor("$qualifier$.getAndSet($idx$, $val$)", "$qualifier$[$idx$] = $val$");
    }
  }
  return null;
}
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import com.facebook.buck.log.Logger;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.ImmutableFlavor;
import com.facebook.buck.model.UnflavoredBuildTarget;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.BuildTargetSourcePath;
import com.facebook.buck.rules.DependencyAggregation;
import com.facebook.buck.rules.ExplicitBuildTargetSourcePath;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.SymlinkTree;
import com.facebook.buck.rules.coercer.FrameworkPath;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.MoreCollectors;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.facebook.buck.util.immutables.BuckStyleTuple;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.base.Suppliers;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Lists;
import org.immutables.value.Value;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import javax.annotation.Nonnull;
@Value.Immutable
@BuckStyleImmutable
abstract class AbstractCxxSourceRuleFactory {
private static final Logger LOG = Logger.get(AbstractCxxSourceRuleFactory.class);
// Prefix used in the flavor of every compile rule target created by this factory;
// see createCompileBuildTarget() and isCompileFlavoredBuildTarget().
private static final String COMPILE_FLAVOR_PREFIX = "compile-";
// Flavor of the no-op rule that aggregates all preprocessor deps;
// see requireAggregatedPreprocessDepsRule().
private static final Flavor AGGREGATED_PREPROCESS_DEPS_FLAVOR =
    ImmutableFlavor.of("preprocessor-deps");
// Construction parameters, supplied positionally through the generated
// CxxSourceRuleFactory.of(...) static factory (order matters for @Value.Parameter).
@Value.Parameter
protected abstract BuildRuleParams getParams();
@Value.Parameter
protected abstract BuildRuleResolver getResolver();
@Value.Parameter
protected abstract SourcePathResolver getPathResolver();
@Value.Parameter
protected abstract SourcePathRuleFinder getRuleFinder();
@Value.Parameter
protected abstract CxxBuckConfig getCxxBuckConfig();
@Value.Parameter
protected abstract CxxPlatform getCxxPlatform();
// Preprocessor inputs (deps, includes, frameworks, per-type flags) contributed by deps.
@Value.Parameter
protected abstract ImmutableList<CxxPreprocessorInput> getCxxPreprocessorInput();
// Per-source-type compiler flags declared on the rule itself.
@Value.Parameter
protected abstract ImmutableMultimap<CxxSource.Type, String> getCompilerFlags();
/** NOTE: {@code prefix_header} is incompatible with {@code precompiled_header}. */
@Value.Parameter
protected abstract Optional<SourcePath> getPrefixHeader();
/** NOTE: {@code precompiled_header} is incompatible with {@code prefix_header}. */
@Value.Parameter
protected abstract Optional<SourcePath> getPrecompiledHeader();
@Value.Parameter
protected abstract PicType getPicType();
// When present, sources are compiled against symlinked copies under this sandbox tree.
@Value.Parameter
protected abstract Optional<SymlinkTree> getSandboxTree();
/** Immutables validation hook: rejects rules that set both mutually-exclusive header options. */
@Value.Check
protected void checkPrefixAndPrecompiledHeaderArgs() {
  boolean hasPrefixHeader = getPrefixHeader().isPresent();
  boolean hasPrecompiledHeader = getPrecompiledHeader().isPresent();
  if (hasPrefixHeader && hasPrecompiledHeader) {
    throw new HumanReadableException(
        "Cannot use `prefix_header` and `precompiled_header` in the same rule.");
  }
}
/**
 * Collects every build rule preprocessing depends on: the deps of each
 * {@link CxxPreprocessorInput}, rules backing the prefix header or precompiled header
 * (when present), and the sandbox tree plus the rules backing its links (when sandboxing
 * is enabled).
 */
private ImmutableSortedSet<BuildRule> getPreprocessDeps() {
  ImmutableSortedSet.Builder<BuildRule> builder = ImmutableSortedSet.naturalOrder();
  for (CxxPreprocessorInput input : getCxxPreprocessorInput()) {
    builder.addAll(input.getDeps(getResolver(), getRuleFinder()));
  }
  if (getPrefixHeader().isPresent()) {
    builder.addAll(getRuleFinder().filterBuildRuleInputs(getPrefixHeader().get()));
  }
  if (getPrecompiledHeader().isPresent()) {
    builder.addAll(getRuleFinder().filterBuildRuleInputs(getPrecompiledHeader().get()));
  }
  if (getSandboxTree().isPresent()) {
    SymlinkTree tree = getSandboxTree().get();
    builder.add(tree);
    builder.addAll(getRuleFinder().filterBuildRuleInputs(tree.getLinks().values()));
  }
  return builder.build();
}
/** All framework paths contributed by the preprocessor inputs, deduplicated; memoized. */
@Value.Lazy
protected ImmutableSet<FrameworkPath> getFrameworks() {
  ImmutableSet.Builder<FrameworkPath> frameworks = ImmutableSet.builder();
  for (CxxPreprocessorInput input : getCxxPreprocessorInput()) {
    frameworks.addAll(input.getFrameworks());
  }
  return frameworks.build();
}
/** All header groups contributed by the preprocessor inputs, in input order; memoized. */
@Value.Lazy
protected ImmutableList<CxxHeaders> getIncludes() {
  ImmutableList.Builder<CxxHeaders> includes = ImmutableList.builder();
  for (CxxPreprocessorInput input : getCxxPreprocessorInput()) {
    includes.addAll(input.getIncludes());
  }
  return includes.build();
}
// Per-source-type preprocessor flags, memoized: the concatenation of the flags contributed
// by every CxxPreprocessorInput for the requested type, in input order.
private final LoadingCache<CxxSource.Type, ImmutableList<String>> preprocessorFlags =
    CacheBuilder.newBuilder()
        .build(
            new CacheLoader<CxxSource.Type, ImmutableList<String>>() {
              @Override
              public ImmutableList<String> load(@Nonnull CxxSource.Type type) {
                ImmutableList.Builder<String> builder = ImmutableList.builder();
                for (CxxPreprocessorInput input : getCxxPreprocessorInput()) {
                  builder.addAll(input.getPreprocessorFlags().get(type));
                }
                return builder.build();
              }
            });
// Memoized preprocessor delegates keyed by (source type, per-file flags), so identical
// preprocessor configurations are shared across sources.
private final LoadingCache<PreprocessorDelegateCacheKey, PreprocessorDelegateCacheValue>
    preprocessorDelegates = CacheBuilder.newBuilder()
        .build(new PreprocessorDelegateCacheLoader());
/**
 * Returns the no-op rule that aggregates the preprocessor dependencies, creating and
 * registering it on first use.
 *
 * Individual compile rules can depend on it, instead of having to depend on every preprocessor
 * dep themselves. This turns O(n*m) dependencies into O(n+m) dependencies, where n is number of
 * files in a target, and m is the number of targets.
 */
private BuildRule requireAggregatedPreprocessDepsRule() {
  BuildTarget target = createAggregatedPreprocessDepsBuildTarget();
  Optional<DependencyAggregation> existingRule =
      getResolver().getRuleOptionalWithType(target, DependencyAggregation.class);
  if (existingRule.isPresent()) {
    return existingRule.get();
  }
  DependencyAggregation rule =
      new DependencyAggregation(
          getParams().copyWithChanges(
              target,
              Suppliers.ofInstance(getPreprocessDeps()),
              Suppliers.ofInstance(ImmutableSortedSet.of())));
  getResolver().addToIndex(rule);
  return rule;
}
/**
 * @return the target for this rule's aggregated-preprocessor-deps rule: the base target
 *     flavored with the platform flavor plus {@link #AGGREGATED_PREPROCESS_DEPS_FLAVOR}.
 */
@VisibleForTesting
BuildTarget createAggregatedPreprocessDepsBuildTarget() {
  return BuildTarget.builder(getParams().getBuildTarget())
      .addFlavors(getCxxPlatform().getFlavor(), AGGREGATED_PREPROCESS_DEPS_FLAVOR)
      .build();
}
/**
 * Normalizes a source name for use in output paths: splits on the platform path separator,
 * drops empty segments, and replaces {@code ".."} segments with {@code "__PAR__"} so the
 * result never escapes its directory.
 */
private String getOutputName(String name) {
  StringBuilder normalized = new StringBuilder();
  for (String part : Splitter.on(File.separator).omitEmptyStrings().split(name)) {
    if (normalized.length() > 0) {
      normalized.append(File.separator);
    }
    // TODO(#7877540): Remove once we prevent disabling package boundary checks.
    normalized.append("..".equals(part) ? "__PAR__" : part);
  }
  return normalized.toString();
}
/**
 * @return the object file name for the given source name. When the platform linker has
 *     file-path size limitations, a fixed short name ({@code "out"}) is used instead of the
 *     source-derived name.
 */
private String getCompileOutputName(String name) {
  Linker ld = getCxxPlatform().getLd().resolve(getResolver());
  String outName = ld.hasFilePathSizeLimitations() ? "out" : getOutputName(name);
  return outName + "." + getCxxPlatform().getObjectFileExtension();
}
// Suffix embedded in a compile rule's flavor: normalized source name + object file extension.
// Unlike getCompileOutputName(), this is never shortened for linker path limits.
private String getCompileFlavorSuffix(String name) {
  return getOutputName(name) + "." + getCxxPlatform().getObjectFileExtension();
}
/**
 * @return the output path for an object file compiled from the source with the given name,
 *     rooted in the gen dir of the given (already flavored) compile target.
 */
@VisibleForTesting
Path getCompileOutputPath(BuildTarget target, String name) {
  return BuildTargets.getGenPath(getParams().getProjectFilesystem(), target, "%s")
      .resolve(getCompileOutputName(name));
}
/**
 * @return a build target for a {@link CxxPreprocessAndCompile} rule for the source with the
 *     given name. The flavor encodes the platform, the {@code compile-} prefix, an optional
 *     {@code pic-} marker, and the sanitized source-derived suffix.
 */
@VisibleForTesting
public BuildTarget createCompileBuildTarget(String name) {
  String outputName = CxxFlavorSanitizer.sanitize(getCompileFlavorSuffix(name));
  return BuildTarget
      .builder(getParams().getBuildTarget())
      .addFlavors(getCxxPlatform().getFlavor())
      .addFlavors(
          ImmutableFlavor.of(
              String.format(
                  COMPILE_FLAVOR_PREFIX + "%s%s",
                  getPicType() == PicType.PIC ? "pic-" : "",
                  outputName)))
      .build();
}
/**
 * @return a build target for the infer-capture rule for the source with the given name.
 *     Unlike {@link #createCompileBuildTarget(String)}, this keeps the base target's existing
 *     flavors and appends the infer-capture flavor plus the sanitized source suffix.
 */
public BuildTarget createInferCaptureBuildTarget(String name) {
  String outputName = CxxFlavorSanitizer.sanitize(getCompileFlavorSuffix(name));
  return BuildTarget
      .builder(getParams().getBuildTarget())
      .addAllFlavors(getParams().getBuildTarget().getFlavors())
      .addFlavors(getCxxPlatform().getFlavor())
      .addFlavors(ImmutableFlavor.of(String.format("%s-%s",
      CxxInferEnhancer.InferFlavors.INFER_CAPTURE.get().toString(),
      outputName)))
      .build();
}
/** @return true if any flavor of {@code target} marks it as a compile rule from this factory. */
public static boolean isCompileFlavoredBuildTarget(BuildTarget target) {
  for (Flavor flavor : target.getFlavors()) {
    if (flavor.getName().startsWith(COMPILE_FLAVOR_PREFIX)) {
      return true;
    }
  }
  return false;
}
/**
 * @return the platform-level compiler flags for the given source type, plus the platform's
 *     assembler flags for already-preprocessed types (which still require assembling).
 */
private ImmutableList<String> getPlatformCompileFlags(CxxSource.Type type) {
  ImmutableList.Builder<String> flags = ImmutableList.builder();
  // Source-type specific compiler flags configured on the platform.
  flags.addAll(CxxSourceTypes.getPlatformCompilerFlags(getCxxPlatform(), type));
  // TODO(andrewjcg): We shouldn't care about lower-level assembling. If the user has assembler
  // flags in mind which they want to propagate to other languages, they should pass them in via
  // some other means (e.g. `.buckconfig`).
  switch (type) {
    case C_CPP_OUTPUT:
    case OBJC_CPP_OUTPUT:
    case CXX_CPP_OUTPUT:
    case OBJCXX_CPP_OUTPUT:
    case CUDA_CPP_OUTPUT:
      // These source types require assembling, so add in platform-specific assembler flags.
      flags.addAll(getCxxPlatform().getAsflags());
      break;
    default:
      break;
  }
  return flags.build();
}
/** @return the compiler flags declared on this rule for the given source type. */
private ImmutableList<String> getRuleCompileFlags(CxxSource.Type type) {
  return ImmutableList.copyOf(getCompilerFlags().get(type));
}
/**
 * @return a {@link CxxPreprocessAndCompile} rule that compiles and assembles the given
 *     {@link CxxSource}; the source must already be preprocessed
 *     (see {@code CxxSourceTypes.isCompilableType}), so no preprocessing is set up here.
 *     The new rule is registered with the resolver before being returned.
 */
@VisibleForTesting
public CxxPreprocessAndCompile createCompileBuildRule(
    String name,
    CxxSource source) {
  Preconditions.checkArgument(CxxSourceTypes.isCompilableType(source.getType()));
  BuildTarget target = createCompileBuildTarget(name);
  DepsBuilder depsBuilder = new DepsBuilder(getRuleFinder());
  Compiler compiler =
      CxxSourceTypes.getCompiler(getCxxPlatform(), source.getType())
          .resolve(getResolver());
  // Build up the list of compiler flags.
  CxxToolFlags flags = CxxToolFlags.explicitBuilder()
      // If we're using pic, add in the appropriate flag.
      .addAllPlatformFlags(getPicType().getFlags(compiler))
      // Add in the platform specific compiler flags.
      .addAllPlatformFlags(getPlatformCompileFlags(source.getType()))
      // Add custom compiler flags.
      .addAllRuleFlags(getRuleCompileFlags(source.getType()))
      // Add custom per-file flags.
      .addAllRuleFlags(source.getFlags())
      .build();
  CompilerDelegate compilerDelegate =
      new CompilerDelegate(
          getPathResolver(),
          getCxxPlatform().getCompilerDebugPathSanitizer(),
          compiler,
          flags);
  depsBuilder.add(compilerDelegate);
  depsBuilder.add(source);
  // Build the CxxCompile rule and add it to our sorted set of build rules.
  CxxPreprocessAndCompile result = CxxPreprocessAndCompile.compile(
      getParams().copyWithChanges(
          target,
          Suppliers.ofInstance(depsBuilder.build()),
          Suppliers.ofInstance(ImmutableSortedSet.of())),
      compilerDelegate,
      getCompileOutputPath(target, name),
      source.getPath(),
      source.getType(),
      getCxxPlatform().getCompilerDebugPathSanitizer(),
      getCxxPlatform().getAssemblerDebugPathSanitizer(),
      getSandboxTree());
  getResolver().addToIndex(result);
  return result;
}
/**
 * Returns the existing compile rule for {@code name} from the resolver, or creates one if
 * none exists yet. Throws when an existing rule was built from a different source path,
 * which indicates two sources collided on the same flavored target.
 */
@VisibleForTesting
CxxPreprocessAndCompile requireCompileBuildRule(
    String name,
    CxxSource source) {
  BuildTarget target = createCompileBuildTarget(name);
  Optional<CxxPreprocessAndCompile> existing = getResolver().getRuleOptionalWithType(
      target, CxxPreprocessAndCompile.class);
  if (!existing.isPresent()) {
    return createCompileBuildRule(name, source);
  }
  CxxPreprocessAndCompile rule = existing.get();
  if (!rule.getInput().equals(source.getPath())) {
    throw new RuntimeException(
        String.format("Hash collision for %s; a build rule would have been ignored.", name));
  }
  return rule;
}
/**
 * Assembles the full preprocessor flag set for a source: PIC flags (from the compiler that
 * would consume the preprocessed output), platform preprocess flags for the type, the
 * aggregated per-type flags from the preprocessor inputs, and the per-file flags.
 */
private CxxToolFlags computePreprocessorFlags(
    CxxSource.Type type,
    ImmutableList<String> sourceFlags) {
  Compiler compiler = CxxSourceTypes.getCompiler(
      getCxxPlatform(),
      CxxSourceTypes.getPreprocessorOutputType(type)).resolve(getResolver());
  return CxxToolFlags.explicitBuilder()
      .addAllPlatformFlags(getPicType().getFlags(compiler))
      .addAllPlatformFlags(CxxSourceTypes.getPlatformPreprocessFlags(getCxxPlatform(), type))
      .addAllRuleFlags(preprocessorFlags.getUnchecked(type))
      // Add custom per-file flags.
      .addAllRuleFlags(sourceFlags)
      .build();
}
/**
 * Assembles the full compiler flag set for a source. Flags are looked up for the
 * post-preprocessing output type, since that is what the compiler actually consumes.
 */
private CxxToolFlags computeCompilerFlags(
    CxxSource.Type type,
    ImmutableList<String> sourceFlags) {
  AbstractCxxSource.Type outputType = CxxSourceTypes.getPreprocessorOutputType(type);
  return CxxToolFlags.explicitBuilder()
      // If we're using pic, add in the appropriate flag.
      .addAllPlatformFlags(
          getPicType().getFlags(CxxSourceTypes.getCompiler(getCxxPlatform(), outputType)
              .resolve(getResolver())))
      // Add in the platform specific compiler flags.
      .addAllPlatformFlags(
          getPlatformCompileFlags(outputType))
      .addAllRuleFlags(getRuleCompileFlags(outputType))
      .addAllRuleFlags(sourceFlags)
      .build();
}
/**
 * Returns the existing infer-capture rule for {@code name} from the resolver, creating it
 * lazily when none exists yet.
 */
private CxxInferCapture requireInferCaptureBuildRule(
    String name,
    CxxSource source,
    InferBuckConfig inferConfig) {
  BuildTarget target = createInferCaptureBuildTarget(name);
  return getResolver()
      .getRuleOptionalWithType(target, CxxInferCapture.class)
      .orElseGet(() -> createInferCaptureBuildRule(target, name, source, inferConfig));
}
/**
 * Creates an infer-capture rule for the given preprocessable source, wiring in the shared
 * preprocessor delegate, preprocessor/compiler flags, and the aggregated preprocess deps,
 * then registers it with the resolver.
 */
private CxxInferCapture createInferCaptureBuildRule(
    BuildTarget target,
    String name,
    CxxSource source,
    InferBuckConfig inferConfig) {
  Preconditions.checkArgument(CxxSourceTypes.isPreprocessableType(source.getType()));
  LOG.verbose("Creating preprocessed InferCapture build rule %s for %s", target, source);
  DepsBuilder depsBuilder = new DepsBuilder(getRuleFinder());
  depsBuilder.add(requireAggregatedPreprocessDepsRule());
  // Reuse the memoized preprocessor delegate for this (type, flags) combination.
  PreprocessorDelegateCacheValue preprocessorDelegateValue = preprocessorDelegates.getUnchecked(
      PreprocessorDelegateCacheKey.of(source.getType(), source.getFlags()));
  depsBuilder.add(preprocessorDelegateValue.getPreprocessorDelegate());
  CxxToolFlags ppFlags =
      CxxToolFlags.copyOf(
          CxxSourceTypes.getPlatformPreprocessFlags(getCxxPlatform(), source.getType()),
          preprocessorFlags.getUnchecked(source.getType()));
  CxxToolFlags cFlags = computeCompilerFlags(source.getType(), source.getFlags());
  depsBuilder.add(source);
  CxxInferCapture result = new CxxInferCapture(
      getParams().copyWithChanges(
          target,
          Suppliers.ofInstance(depsBuilder.build()),
          Suppliers.ofInstance(ImmutableSortedSet.of())),
      ppFlags,
      cFlags,
      source.getPath(),
      source.getType(),
      getCompileOutputPath(target, name),
      preprocessorDelegateValue.getPreprocessorDelegate(),
      inferConfig,
      getCxxPlatform().getCompilerDebugPathSanitizer());
  getResolver().addToIndex(result);
  return result;
}
/**
 * @return a {@link CxxPreprocessAndCompile} rule that preprocesses, compiles, and assembles the
 *     given {@link CxxSource}. Optionally attaches a precompiled-header rule when PCH use is
 *     possible and requested via {@code prefix_header} or {@code precompiled_header}. The new
 *     rule is registered with the resolver before being returned.
 */
@VisibleForTesting
public CxxPreprocessAndCompile createPreprocessAndCompileBuildRule(
    String name,
    CxxSource source) {
  BuildTarget target = createCompileBuildTarget(name);
  LOG.verbose("Creating preprocess and compile %s for %s", target, source);
  Preconditions.checkArgument(CxxSourceTypes.isPreprocessableType(source.getType()));
  DepsBuilder depsBuilder = new DepsBuilder(getRuleFinder());
  depsBuilder.add(requireAggregatedPreprocessDepsRule());
  CompilerDelegate compilerDelegate =
      new CompilerDelegate(
          getPathResolver(),
          getCxxPlatform().getCompilerDebugPathSanitizer(),
          CxxSourceTypes.getCompiler(
              getCxxPlatform(),
              CxxSourceTypes.getPreprocessorOutputType(source.getType()))
              .resolve(getResolver()),
          computeCompilerFlags(source.getType(), source.getFlags()));
  depsBuilder.add(compilerDelegate);
  // Reuse the memoized preprocessor delegate for this (type, flags) combination.
  PreprocessorDelegateCacheValue preprocessorDelegateValue = preprocessorDelegates.getUnchecked(
      PreprocessorDelegateCacheKey.of(source.getType(), source.getFlags()));
  PreprocessorDelegate preprocessorDelegate = preprocessorDelegateValue.getPreprocessorDelegate();
  depsBuilder.add(preprocessorDelegate);
  depsBuilder.add(source);
  Preprocessor preprocessor = preprocessorDelegate.getPreprocessor();
  // Compute PCH availability once: it gates both the explicit `precompiled_header` validation
  // below and the decision to build/attach a PCH rule.
  boolean canUsePch =
      canUsePrecompiledHeaders(getCxxBuckConfig(), preprocessor, source.getType());
  if (getPrecompiledHeader().isPresent() && !canUsePch) {
    throw new HumanReadableException(
        "Precompiled header was requested for rule \"" +
        this.getParams().getBuildTarget().toString() +
        "\", but PCH's are not possible under " +
        "the current environment (preprocessor/compiler, source file's language, " +
        "and/or 'cxx.pch_enabled' option).");
  }
  Optional<CxxPrecompiledHeader> precompiledHeaderRule = Optional.empty();
  if (canUsePch && (getPrefixHeader().isPresent() || getPrecompiledHeader().isPresent())) {
    precompiledHeaderRule = Optional.of(
        requirePrecompiledHeaderBuildRule(
            preprocessorDelegateValue,
            source.getType(),
            source.getFlags()));
    depsBuilder.add(precompiledHeaderRule.get());
    if (getPrecompiledHeader().isPresent()) {
      // For a precompiled header (and not a prefix header), we may need extra include paths.
      // The PCH build might have involved some deps that this rule does not have, so we
      // would need to pull in its include paths to ensure any includes that happen during this
      // build play out the same way as they did for the PCH.
      try {
        preprocessorDelegate = preprocessorDelegate.withLeadingIncludePaths(
            precompiledHeaderRule.get().getCxxIncludePaths());
      } catch (PreprocessorDelegate.ConflictingHeadersException e) {
        throw e.getHumanReadableExceptionForBuildTarget(getParams().getBuildTarget());
      }
    }
  }
  // Build the CxxCompile rule and add it to our sorted set of build rules.
  CxxPreprocessAndCompile result = CxxPreprocessAndCompile.preprocessAndCompile(
      getParams().copyWithChanges(
          target,
          Suppliers.ofInstance(depsBuilder.build()),
          Suppliers.ofInstance(ImmutableSortedSet.of())),
      preprocessorDelegate,
      compilerDelegate,
      getCompileOutputPath(target, name),
      source.getPath(),
      source.getType(),
      precompiledHeaderRule,
      getCxxPlatform().getCompilerDebugPathSanitizer(),
      getCxxPlatform().getAssemblerDebugPathSanitizer(),
      getSandboxTree());
  getResolver().addToIndex(result);
  return result;
}
/**
 * Returns the existing preprocess-and-compile rule for {@code name} from the resolver, or
 * creates one if none exists yet. Throws when an existing rule was built from a different
 * source path, which indicates two sources collided on the same flavored target.
 */
@VisibleForTesting
CxxPreprocessAndCompile requirePreprocessAndCompileBuildRule(String name, CxxSource source) {
  BuildTarget target = createCompileBuildTarget(name);
  Optional<CxxPreprocessAndCompile> existingRule = getResolver().getRuleOptionalWithType(
      target, CxxPreprocessAndCompile.class);
  if (existingRule.isPresent()) {
    if (!existingRule.get().getInput().equals(source.getPath())) {
      throw new RuntimeException(
          String.format("Hash collision for %s; a build rule would have been ignored.", name));
    }
    return existingRule.get();
  }
  return createPreprocessAndCompileBuildRule(name, source);
}
/**
 * Look up or build a precompiled header build rule which this build rule is requesting.
 *
 * <p>
 * The PCH is requested either via a {@code prefix_header='<em>pathToHeaderFileOrTarget</em>'},
 * transparently converting the prefix header to a precompiled header, or a precompiled header
 * requested with {@code precompiled_header='<em>//:ruleToPCHTemplate</em>'}.
 * </p>
 *
 * <p>
 * Compilers only accept precompiled headers generated with the same flags and language options.
 * As such, each prefix header may generate multiple pch files, and need unique build targets
 * to be differentiated in the build graph.
 * </p>
 *
 * <p>The {@code sourceType} and {@code sourceFlags} come from one of the source in the rule
 * which is using the PCH. This is so we can obtain certain flags (language options and such)
 * so the PCH is compatible with the rule requesting it.
 * </p>
 *
 * @param preprocessorDelegateCacheValue memoized preprocessor state for the requesting source
 * @param sourceType the type of the source that will consume the PCH
 * @param sourceFlags the per-file flags of the source that will consume the PCH
 */
private CxxPrecompiledHeader requirePrecompiledHeaderBuildRule(
    PreprocessorDelegateCacheValue preprocessorDelegateCacheValue,
    CxxSource.Type sourceType,
    ImmutableList<String> sourceFlags) {
  // This method is called only if one of these is present; guarantee that for the if/else below.
  Preconditions.checkState(getPrefixHeader().isPresent() ^ getPrecompiledHeader().isPresent());
  return getPrefixHeader().isPresent()
         ? buildPrecompiledHeaderFromPrefixHeader(
             preprocessorDelegateCacheValue,
             sourceType,
             sourceFlags,
             getPrefixHeader().get())
         : buildPrecompiledHeaderFromTemplateRule(
             preprocessorDelegateCacheValue,
             sourceType,
             sourceFlags,
             getPrecompiledHeader().get());
}
/**
 * Builds a PCH rule directly from a {@code prefix_header} file. The PCH target's flavor
 * embeds both the base and full hashes of the compiler flags, so differently-flagged users
 * of the same prefix header end up with distinct, flag-compatible PCH rules.
 */
private CxxPrecompiledHeader buildPrecompiledHeaderFromPrefixHeader(
    PreprocessorDelegateCacheValue preprocessorDelegateCacheValue,
    CxxSource.Type sourceType,
    ImmutableList<String> sourceFlags,
    SourcePath headerPath) {
  DepsBuilder depsBuilder = new DepsBuilder(getRuleFinder());
  // We need the preprocessor deps for this rule, for its prefix header.
  depsBuilder.add(preprocessorDelegateCacheValue.getPreprocessorDelegate());
  depsBuilder.add(requireAggregatedPreprocessDepsRule());
  CxxToolFlags compilerFlags = computeCompilerFlags(sourceType, sourceFlags);
  // Language needs to be part of the key, PCHs built under a different language are incompatible.
  // (Replace `c++` with `cxx`; avoid default scrubbing which would make it the cryptic `c__`.)
  final String langCode = sourceType.getLanguage().replaceAll("c\\+\\+", "cxx");
  final String pchBaseID =
      "pch-" + langCode + "-" + preprocessorDelegateCacheValue.getBaseHash(compilerFlags);
  final String pchFullID =
      pchBaseID + "-" + preprocessorDelegateCacheValue.getFullHash(compilerFlags);
  return buildPrecompiledHeader(
      preprocessorDelegateCacheValue.getPreprocessorDelegate(),
      sourceType,
      compilerFlags,
      headerPath,
      depsBuilder,
      getParams().getBuildTarget().getUnflavoredBuildTarget(),
      ImmutableSortedSet.of(
          getCxxPlatform().getFlavor(),
          ImmutableFlavor.of(Flavor.replaceInvalidCharacters(pchFullID))));
}
/**
 * Build a PCH rule, given a {@code cxx_precompiled_header} rule.
 *
 * <p>
 * We'll "instantiate" this PCH from this template, using the parameters (src, dependencies)
 * from the template itself, plus the build flags that are used in the current build rule
 * (so that this instantiated version uses compatible build flags and thus the PCH is guaranteed
 * usable with this rule).
 * </p>
 */
private CxxPrecompiledHeader buildPrecompiledHeaderFromTemplateRule(
    PreprocessorDelegateCacheValue preprocessorDelegateCacheValue,
    CxxSource.Type sourceType,
    ImmutableList<String> sourceFlags,
    SourcePath headerTargetPath) {
  DepsBuilder depsBuilder = new DepsBuilder(getRuleFinder());
  PreprocessorDelegate preprocessorDelegateForCxxRule =
      preprocessorDelegateCacheValue.getPreprocessorDelegate();
  Preprocessor preprocessor = preprocessorDelegateForCxxRule.getPreprocessor();
  // The `precompiled_header` parameter must reference a cxx_precompiled_header template rule.
  BuildTarget pchTemplateTarget = ((BuildTargetSourcePath<?>) headerTargetPath).getTarget();
  Optional<CxxPrecompiledHeaderTemplate> pchTemplateRuleOpt =
      getResolver().getRuleOptionalWithType(
          pchTemplateTarget,
          CxxPrecompiledHeaderTemplate.class);
  Preconditions.checkState(pchTemplateRuleOpt.isPresent());
  CxxPrecompiledHeaderTemplate pchTemplate = pchTemplateRuleOpt.get();
  // Build compiler flags, taking from the source rule, but leaving out its deps.
  // We just need the flags pertaining to PCH compatibility: language, PIC, macros, etc.
  // and nothing related to the deps of this particular rule (hence 'getNonIncludePathFlags').
  CxxToolFlags compilerFlags = CxxToolFlags.concat(
      preprocessorDelegateForCxxRule.getNonIncludePathFlags(/* no pch */ Optional.empty()),
      computeCompilerFlags(sourceType, sourceFlags));
  // Now build a new pp-delegate specially for this PCH rule.
  PreprocessorDelegate preprocessorDelegate =
      pchTemplate.buildPreprocessorDelegate(
          getCxxPlatform(),
          preprocessor,
          compilerFlags);
  // Language needs to be part of the key, PCHs built under a different language are incompatible.
  // (Replace `c++` with `cxx`; avoid default scrubbing which would make it the cryptic `c__`.)
  final String langCode = sourceType.getLanguage().replaceAll("c\\+\\+", "cxx");
  final String pchBaseID =
      "pch-" + langCode + "-" + preprocessorDelegateCacheValue.getBaseHash(compilerFlags);
  // The instantiated PCH inherits the template's deps in addition to this rule's flags.
  for (BuildRule rule : pchTemplate.getDeps()) {
    depsBuilder.add(rule);
  }
  depsBuilder.add(pchTemplate.requireAggregatedDepsRule(getCxxPlatform()));
  depsBuilder.add(preprocessorDelegate);
  return buildPrecompiledHeader(
      preprocessorDelegate,
      sourceType,
      compilerFlags,
      pchTemplate.sourcePath,
      depsBuilder,
      pchTemplateTarget.getUnflavoredBuildTarget(),
      ImmutableSortedSet.of(
          getCxxPlatform().getFlavor(),
          ImmutableFlavor.of(Flavor.replaceInvalidCharacters(pchBaseID))));
}
/**
 * Look up or build a precompiled header build rule which this build rule is requesting.
 *
 * <p>
 * This method will first try to determine whether a matching PCH was already created;
 * if so, it will be reused. This is done by searching the cache in the {@link BuildRuleResolver}
 * owned by this class. If this ends up building a new instance of {@link CxxPrecompiledHeader},
 * it will be added to the resolver cache.
 * </p>
 */
private CxxPrecompiledHeader buildPrecompiledHeader(
    PreprocessorDelegate preprocessorDelegate,
    CxxSource.Type sourceType,
    CxxToolFlags compilerFlags,
    SourcePath headerPath,
    DepsBuilder depsBuilder,
    UnflavoredBuildTarget templateTarget,
    ImmutableSortedSet<Flavor> flavors) {
  BuildTarget target = BuildTarget
      .builder(templateTarget)
      .addAllFlavors(flavors)
      .build();
  // Reuse an already-built PCH rule for this exact flavored target, if one exists.
  Optional<CxxPrecompiledHeader> existingRule =
      getResolver().getRuleOptionalWithType(target, CxxPrecompiledHeader.class);
  if (existingRule.isPresent()) {
    return existingRule.get();
  }
  // Give the PCH a filename that looks like a header file with .gch appended to it, GCC-style.
  // GCC accepts an "-include" flag with the .h file as its arg, and auto-appends ".gch" to
  // automagically use the precompiled header in place of the original header. Of course in
  // our case we'll only have the ".gch" file, which is alright; the ".h" isn't truly needed.
  Path output = BuildTargets.getGenPath(getParams().getProjectFilesystem(), target, "%s.h.gch");
  CompilerDelegate compilerDelegate =
      new CompilerDelegate(
          getPathResolver(),
          getCxxPlatform().getCompilerDebugPathSanitizer(),
          CxxSourceTypes.getCompiler(
              getCxxPlatform(),
              CxxSourceTypes.getPreprocessorOutputType(sourceType))
              .resolve(getResolver()),
          compilerFlags);
  depsBuilder.add(compilerDelegate);
  depsBuilder.add(headerPath);
  BuildRuleParams params =
      getParams().copyWithChanges(
          target,
          Suppliers.ofInstance(depsBuilder.build()),
          Suppliers.ofInstance(ImmutableSortedSet.of()));
  CxxPrecompiledHeader rule = new CxxPrecompiledHeader(
      params,
      output,
      preprocessorDelegate,
      compilerDelegate,
      compilerFlags,
      headerPath,
      sourceType,
      getCxxPlatform().getCompilerDebugPathSanitizer(),
      getCxxPlatform().getAssemblerDebugPathSanitizer());
  getResolver().addToIndex(rule);
  return rule;
}
/**
 * Creates (or reuses) an infer-capture rule for every source not blacklisted by
 * {@code sourceFilter}. All sources must be preprocessable.
 */
public ImmutableSet<CxxInferCapture> requireInferCaptureBuildRules(
    ImmutableMap<String, CxxSource> sources,
    InferBuckConfig inferConfig,
    CxxInferSourceFilter sourceFilter) {
  ImmutableSet.Builder<CxxInferCapture> objects = ImmutableSet.builder();
  for (Map.Entry<String, CxxSource> entry : sources.entrySet()) {
    String name = entry.getKey();
    CxxSource source = entry.getValue();
    Preconditions.checkState(
        CxxSourceTypes.isPreprocessableType(source.getType()),
        "Only preprocessable source types are currently supported");
    // Blacklisted sources are silently skipped rather than failing the build.
    if (sourceFilter.isBlacklisted(source)) {
      continue;
    }
    CxxInferCapture rule = requireInferCaptureBuildRule(
        name,
        source,
        inferConfig);
    objects.add(rule);
  }
  return objects.build();
}
/**
 * Creates (or reuses) one compile rule per source and returns a map from each rule to the
 * source path of its output object file. Preprocessable sources get a combined
 * preprocess-and-compile rule; already-preprocessed sources get a plain compile rule.
 */
@VisibleForTesting
ImmutableMap<CxxPreprocessAndCompile, SourcePath> requirePreprocessAndCompileRules(
    ImmutableMap<String, CxxSource> sources) {
  return sources.entrySet().stream()
      .map(entry -> {
        String name = entry.getKey();
        CxxSource source = entry.getValue();
        Preconditions.checkState(
            CxxSourceTypes.isPreprocessableType(source.getType()) ||
            CxxSourceTypes.isCompilableType(source.getType()));
        source = getSandboxedCxxSource(source);
        // If it's a preprocessable source, use a combine preprocess-and-compile build rule.
        // Otherwise, use a regular compile rule.
        if (CxxSourceTypes.isPreprocessableType(source.getType())) {
          return requirePreprocessAndCompileBuildRule(name, source);
        } else {
          return requireCompileBuildRule(name, source);
        }
      })
      .collect(MoreCollectors.toImmutableMap(
          Function.identity(),
          CxxPreprocessAndCompile::getSourcePathToOutput));
}
private CxxSource getSandboxedCxxSource(CxxSource source) {
  // Without a sandbox tree the source is used as-is.
  if (!getSandboxTree().isPresent()) {
    return source;
  }
  SymlinkTree sandboxTree = getSandboxTree().get();
  // Recreate the source's relative name, then point the source at the mirrored
  // copy living under the sandbox tree's generated output directory.
  Path sourceName = Paths.get(
      getPathResolver().getSourcePathName(
          getParams().getBuildTarget(),
          source.getPath()));
  Path sandboxRoot = BuildTargets.getGenPath(
      getParams().getProjectFilesystem(), sandboxTree.getBuildTarget(), "%s");
  ExplicitBuildTargetSourcePath sandboxedPath =
      new ExplicitBuildTargetSourcePath(
          sandboxTree.getBuildTarget(),
          sandboxRoot.resolve(sourceName));
  return CxxSource.copyOf(source).withPath(sandboxedPath);
}
/**
 * Can PCH headers be used with the current configuration and type of compiler?
 */
@VisibleForTesting
boolean canUsePrecompiledHeaders(
    CxxBuckConfig cxxBuckConfig,
    Preprocessor preprocessor,
    CxxSource.Type sourceType) {
  // PCH use requires the feature to be switched on in the config, a
  // preprocessor that supports it, and a source language that actually has a
  // precompiled-header variant.
  if (!cxxBuckConfig.isPCHEnabled()) {
    return false;
  }
  if (!preprocessor.supportsPrecompiledHeaders()) {
    return false;
  }
  return sourceType.getPrecompiledHeaderLanguage().isPresent();
}
/**
 * Static convenience entry point: assembles a {@link CxxSourceRuleFactory} from
 * the given pieces and returns the preprocess/compile rules for {@code sources}.
 */
public static ImmutableMap<CxxPreprocessAndCompile, SourcePath> requirePreprocessAndCompileRules(
    BuildRuleParams params,
    BuildRuleResolver resolver,
    SourcePathResolver pathResolver,
    SourcePathRuleFinder ruleFinder,
    CxxBuckConfig cxxBuckConfig,
    CxxPlatform cxxPlatform,
    ImmutableList<CxxPreprocessorInput> cxxPreprocessorInput,
    ImmutableMultimap<CxxSource.Type, String> compilerFlags,
    Optional<SourcePath> prefixHeader,
    Optional<SourcePath> precompiledHeader,
    ImmutableMap<String, CxxSource> sources,
    PicType pic,
    Optional<SymlinkTree> sandboxTree) {
  // Delegate all real work to a throwaway factory instance.
  CxxSourceRuleFactory ruleFactory = CxxSourceRuleFactory.of(
      params,
      resolver,
      pathResolver,
      ruleFinder,
      cxxBuckConfig,
      cxxPlatform,
      cxxPreprocessorInput,
      compilerFlags,
      prefixHeader,
      precompiledHeader,
      pic,
      sandboxTree);
  return ruleFactory.requirePreprocessAndCompileRules(sources);
}
/** Flavor of position-independence for generated object code. */
public enum PicType {
  // Generate position-independent code (e.g. for use in shared libraries).
  PIC {
    @Override
    public ImmutableList<String> getFlags(Compiler compiler) {
      return compiler.getPicFlags();
    }
  },
  // Generate position-dependent code.
  PDC {
    @Override
    public ImmutableList<String> getFlags(Compiler compiler) {
      return compiler.getPdcFlags();
    }
  };
  /** Returns the compiler-specific flags that select this PIC mode. */
  abstract ImmutableList<String> getFlags(Compiler compiler);
}
/**
 * Cache key for preprocessor delegates: the source's language/type plus its
 * per-source flags are the inputs from which a delegate is constructed.
 */
@Value.Immutable
@BuckStyleTuple
interface AbstractPreprocessorDelegateCacheKey {
  CxxSource.Type getSourceType();
  ImmutableList<String> getSourceFlags();
}
@VisibleForTesting
public ImmutableList<String> getFlagsForSource(CxxSource source, boolean allowIncludePathFlags) {
  // Look up (or lazily create) the preprocessor delegate for this source's
  // type + flags, then pull the cached flag lists for the full compiler flags.
  PreprocessorDelegateCacheKey cacheKey =
      PreprocessorDelegateCacheKey.of(source.getType(), source.getFlags());
  PreprocessorDelegateCacheValue cacheValue = preprocessorDelegates.getUnchecked(cacheKey);
  CxxToolFlags toolFlags = computeCompilerFlags(source.getType(), source.getFlags());
  PreprocessorDelegateCacheValue.HashStrings hashStrings = cacheValue.get(toolFlags);
  if (allowIncludePathFlags) {
    return hashStrings.fullFlags;
  }
  return hashStrings.baseFlags;
}
/**
 * Cache entry pairing a {@link PreprocessorDelegate} with lazily-computed flag
 * lists and short command hashes derived from it.
 */
static class PreprocessorDelegateCacheValue {
  private final PreprocessorDelegate preprocessorDelegate;
  // Memoizes HashStrings per compiler-flag set; assembling and hashing the
  // command lines is done at most once per distinct CxxToolFlags key.
  private final LoadingCache<CxxToolFlags, HashStrings> commandHashCache;
  /** Flag lists and 10-character command hashes built from one flag set. */
  class HashStrings {
    /** List of build flags (as strings), except for those related to header search paths. */
    public final ImmutableList<String> baseFlags;
    /** Complete list of all build flags (as strings), including header search paths. */
    public final ImmutableList<String> fullFlags;
    /** First 10 characters of the hash of {@link #baseFlags}. */
    public final String baseHash;
    /** First 10 characters of the hash of {@link #fullFlags}. */
    public final String fullHash;
    public HashStrings(CxxToolFlags compilerFlags) {
      ImmutableList.Builder<String> builder = ImmutableList.<String>builder();
      // Add the build command itself first
      builder.addAll(preprocessorDelegate.getCommandPrefix());
      // Then preprocessor + compiler args, not including include path args like -I, -isystem, ...
      builder.addAll(preprocessorDelegate.getNonIncludePathFlags(Optional.empty()).getAllFlags());
      builder.addAll(compilerFlags.getAllFlags());
      // Output what we have so far, to this list, then hash it.
      this.baseFlags = builder.build();
      this.baseHash = preprocessorDelegate.hashCommand(this.baseFlags).substring(0, 10);
      // Continue building. Using the same builder; add header search paths, to the above flags.
      // (fullFlags is therefore always a prefix-extension of baseFlags.)
      builder.addAll(preprocessorDelegate.getIncludePathFlags().getAllFlags());
      // Output this super-set of flags to this list, then hash it.
      this.fullFlags = builder.build();
      this.fullHash = preprocessorDelegate.hashCommand(this.fullFlags).substring(0, 10);
    }
  }
  PreprocessorDelegateCacheValue(PreprocessorDelegate preprocessorDelegate) {
    this.preprocessorDelegate = preprocessorDelegate;
    this.commandHashCache = CacheBuilder.newBuilder()
        .build(new CacheLoader<CxxToolFlags, HashStrings>() {
          @Override
          public HashStrings load(CxxToolFlags key) {
            // Note: this hash call is mainly for the benefit of precompiled headers, to produce
            // the PCH's hash of build flags. (Since there's no PCH yet, the PCH argument is
            // passed as empty here.)
            return new HashStrings(key);
          }
        });
  }
  /** Returns the delegate this cache entry was built around. */
  PreprocessorDelegate getPreprocessorDelegate() {
    return preprocessorDelegate;
  }
  /** Returns the (possibly cached) flag lists and hashes for the given flag set. */
  @VisibleForTesting
  public HashStrings get(CxxToolFlags flags) {
    return this.commandHashCache.getUnchecked(flags);
  }
  /** Short hash of the command line excluding header search paths. */
  String getBaseHash(CxxToolFlags flags) {
    return get(flags).baseHash;
  }
  /** Short hash of the complete command line, including header search paths. */
  String getFullHash(CxxToolFlags flags) {
    return get(flags).fullHash;
  }
}
/** Builds the {@link PreprocessorDelegateCacheValue} for a source type + flags key. */
private class PreprocessorDelegateCacheLoader
    extends CacheLoader<PreprocessorDelegateCacheKey, PreprocessorDelegateCacheValue> {
  @Override
  public PreprocessorDelegateCacheValue load(@Nonnull PreprocessorDelegateCacheKey key) {
    // Resolve the platform's preprocessor tool for this source type.
    Preprocessor preprocessor =
        CxxSourceTypes.getPreprocessor(getCxxPlatform(), key.getSourceType())
            .resolve(getResolver());
    try {
      PreprocessorDelegate delegate = new PreprocessorDelegate(
          getPathResolver(),
          getCxxPlatform().getCompilerDebugPathSanitizer(),
          getCxxPlatform().getHeaderVerification(),
          getParams().getProjectFilesystem().getRootPath(),
          preprocessor,
          PreprocessorFlags.of(
              getPrefixHeader(),
              computePreprocessorFlags(key.getSourceType(), key.getSourceFlags()),
              getIncludes(),
              getFrameworks()),
          CxxDescriptionEnhancer.frameworkPathToSearchPath(getCxxPlatform(), getPathResolver()),
          getSandboxTree(),
          /* leadingIncludePaths */ Optional.empty());
      return new PreprocessorDelegateCacheValue(delegate);
    } catch (PreprocessorDelegate.ConflictingHeadersException e) {
      // Re-throw conflicting-header errors as a human-readable exception
      // that names the build target being processed.
      throw e.getHumanReadableExceptionForBuildTarget(getParams().getBuildTarget());
    }
  }
}
}
| |
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.uuf.spi;
import org.wso2.carbon.uuf.internal.util.UriUtils;
import java.util.Map;
/**
 * Represents an HTTP request.
 */
public interface HttpRequest {
    String HEADER_CONTENT_TYPE = "Content-Type";
    String HEADER_CONTENT_LENGTH = "Content-Length";
    /**
     * Returns the name of the HTTP method with which this request was made, for example, GET, POST.
     *
     * @return a {@code String} specifying the name of the method with which this request was made
     */
    String getMethod();
    /**
     * Returns the name and version of the protocol the request uses in the form <i>protocol/majorVersion
     * .minorVersion</i>, for example, HTTP/1.1.
     *
     * @return a {@code String} containing the protocol name and version number
     */
    String getProtocol();
    /**
     * Returns a boolean indicating whether this request was made using a secure channel, i.e. HTTPS.
     *
     * @return {@code true} when HTTPS, otherwise {@code false}
     */
    boolean isSecure();
    /**
     * Returns the name of the scheme used to make this request.
     *
     * @return either "{@code http}" or "{@code https}"
     */
    default String getScheme() {
        return (isSecure() ? "https" : "http");
    }
    /**
     * Reconstructs the URL the client used to make the request.
     *
     * @return request url
     */
    String getUrl();
    /**
     * Returns the part of this request's URL from the protocol name up to the query string in the first line of the
     * HTTP request. For example:
     * <br><br>
     * <table summary="Examples of Returned Values"> <tr align=left> <th>First line of HTTP request</th> <th>Returned
     * Value</th> </tr> <tr> <td>GET http://foo.bar/a/b.html HTTP/1.0</td> <td>/a/b.html</td> </tr> <tr> <td>GET
     * https://192.168.1.1:9292/foo/bar?x=y HTTP/1.1</td> <td>/foo/bar</td> </tr> <tr> <td>POST /some/path.html
     * HTTP/1.1</td> <td>/some/path.html</td> </tr> </table>
     * <br>
     * Returned URI string is decoded.
     *
     * @return a decoded {@code String} containing the part of the URL from the protocol name up to the query string
     */
    String getUri();
    /**
     * Returns the part of this request's URI from the first forward slash up to the second forward slash but not
     * including. For example.
     * <br><br>
     * <table summary="Examples of Returned Values"> <tr align=left> <th>URI</th> <th>Returned Value</th> </tr> <tr>
     * <td>/a/b.html</td> <td>/a</td> </tr> <tr> <td>/foo/bar?x=y</td> <td>/foo</td> </tr> <tr> <td>/some/path.html</td>
     * <td>/some</td> </tr> </table>
     *
     * @return a {@code String} containing the part of the URI from the first forward slash up to the second forward
     * slash
     * @see #getUri()
     */
    String getContextPath();
    /**
     * Returns the part of this request's URI from the second forward slash to the end. For example:
     * <br><br>
     * <table summary="Examples of Returned Values"> <tr align=left> <th>URI</th> <th>Returned Value</th> </tr> <tr>
     * <td>/a/b.html</td> <td>/b.html</td> </tr> <tr> <td>/foo/bar?x=y</td> <td>/bar</td> </tr> <tr>
     * <td>/some/path.html</td> <td>/path.html</td> </tr> </table>
     *
     * @return a {@code String} that contains the remaining of the URI after removing the context path from it
     * @see #getUri()
     * @see #getContextPath()
     */
    String getUriWithoutContextPath();
    /**
     * Returns the query string that is contained in the request URL after the path. This method returns {@code null} if
     * the URL does not have a query string.
     *
     * @return an un-decoded {@code String} containing the query string or {@code null} if the URL contains no query
     * string.
     * @see #getUri()
     */
    String getQueryString();
    /**
     * Returns query parameters of this request. All keys and values of the returned map are decoded.
     *
     * @return a map containing parameter names as keys and parameter values as map values
     */
    Map<String, Object> getQueryParams();
    /**
     * Returns all HTTP headers of this request.
     *
     * @return HTTP headers
     */
    Map<String, String> getHeaders();
    /**
     * Returns the value of the specified Cookie.
     *
     * @param cookieName name of the Cookie
     * @return value of the Cookie or {@code null} if a Cookie with the specified name doesn't exist
     */
    String getCookieValue(String cookieName);
    /**
     * Returns the MIME type of the body of the request, or {@code null} if the type is not known. Return value is
     * computed from the "Content-Type" HTTP header.
     *
     * @return a {@code String} containing the name of the MIME type of the request, or {@code null} if the type is not
     * known
     */
    String getContentType();
    /**
     * Returns the length, in bytes, of the request body or -1 if the length is not known. Return value is computed from
     * the "Content-Length" HTTP header.
     *
     * @return the length of the request body or -1 if the length is not known
     */
    long getContentLength();
    /**
     * Returns the form parameters sent in the body of this request.
     *
     * @return a map containing form field names as keys and submitted values as map values
     */
    Map<String, Object> getFormParams();
    /**
     * Returns the files uploaded with this request.
     *
     * @return a map containing file names as keys and uploaded file representations as map values
     */
    Map<String, Object> getFiles();
    /**
     * Returns the Internet Protocol (IP) address of the interface on which the request was received.
     *
     * @return a {@code String} containing the IP address on which the request was received.
     */
    String getLocalAddress();
    /**
     * Returns the Internet Protocol (IP) port number of the interface on which the request was received.
     *
     * @return an integer specifying the port number
     */
    int getLocalPort();
    /**
     * Returns the Internet Protocol (IP) address of the client or last proxy that sent the request.
     *
     * @return a <code>String</code> containing the IP address of the client that sent the request
     */
    String getRemoteAddress();
    /**
     * Returns the Internet Protocol (IP) source port of the client or last proxy that sent the request.
     *
     * @return an integer specifying the port number
     */
    int getRemotePort();
    String toString();
    /**
     * Checks whether this request's URI is syntactically acceptable.
     *
     * @return {@code true} if the URI is valid, otherwise {@code false}
     */
    default boolean isValid() {
        String uri = getUri();
        // A URI must begin with '/' and contain at least two characters.
        if ((uri.length() < 2) || (uri.charAt(0) != '/')) {
            return false;
        }
        // Reject any two consecutive characters drawn from {'/', '.'}
        // (i.e. "//", "..", "/." and "./" are all disallowed).
        boolean isPreviousCharInvalid = false;
        for (int i = 0; i < uri.length(); i++) {
            char currentChar = uri.charAt(i);
            if ((currentChar == '/') || (currentChar == '.')) {
                if (isPreviousCharInvalid) {
                    return false;
                } else {
                    isPreviousCharInvalid = true;
                }
            } else {
                isPreviousCharInvalid = false;
            }
        }
        return true;
    }
    default boolean isStaticResourceRequest() {
        return getUriWithoutContextPath().startsWith("/public/");
    }
    default boolean isComponentStaticResourceRequest() {
        return getUriWithoutContextPath().startsWith(UriUtils.COMPONENT_STATIC_RESOURCES_URI_PREFIX);
    }
    default boolean isThemeStaticResourceRequest() {
        return getUriWithoutContextPath().startsWith(UriUtils.THEMES_STATIC_RESOURCES_URI_PREFIX);
    }
    default boolean isDebugRequest() {
        return getUriWithoutContextPath().startsWith("/debug/");
    }
    default boolean isFragmentRequest() {
        return getUriWithoutContextPath().startsWith(UriUtils.FRAGMENTS_URI_PREFIX);
    }
    default boolean isDefaultFaviconRequest() {
        return getUri().equals("/favicon.ico");
    }
    /**
     * Extracts the context path (everything up to, but not including, the second forward slash) from the given URI.
     *
     * @param uri URI to extract the context path from; must start with a forward slash
     * @return the context path of the given URI
     */
    static String getContextPath(String uri) {
        int secondSlash = uri.indexOf('/', 1); // A URI must start with a slash.
        if (secondSlash == -1) {
            // There is only one slash in the URI.
            return uri;
        } else {
            return uri.substring(0, secondSlash);
        }
    }
    /**
     * Extracts the remainder of the given URI after removing its context path.
     *
     * @param uri URI to process; must start with a forward slash
     * @return the URI without its context path, or an empty string if the URI has a single slash
     */
    static String getUriWithoutContextPath(String uri) {
        int secondSlash = uri.indexOf('/', 1); // A URI must start with a slash.
        if (secondSlash == -1) {
            // There is only one slash in the URI.
            return "";
        } else {
            return uri.substring(secondSlash);
        }
    }
}
| |
package io.cattle.platform.process.common.handler;
import io.cattle.platform.engine.handler.HandlerResult;
import io.cattle.platform.engine.process.ProcessInstance;
import io.cattle.platform.engine.process.ProcessState;
import io.cattle.platform.eventing.EventCallOptions;
import io.cattle.platform.eventing.EventProgress;
import io.cattle.platform.eventing.EventService;
import io.cattle.platform.eventing.model.Event;
import io.cattle.platform.eventing.model.EventVO;
import io.cattle.platform.object.ObjectManager;
import io.cattle.platform.object.meta.ObjectMetaDataManager;
import io.cattle.platform.object.process.ObjectProcessManager;
import io.cattle.platform.object.util.ObjectUtils;
import io.cattle.platform.util.exception.ExecutionException;
import io.cattle.platform.util.type.CollectionUtils;
import io.cattle.platform.util.type.NamedUtils;
import io.cattle.platform.util.type.Priority;
import java.util.HashMap;
import java.util.Map;
import javax.inject.Inject;
import org.apache.commons.lang.StringUtils;
import org.jooq.exception.DataChangedException;
public class EventBasedProcessHandler extends AbstractObjectProcessHandler implements Priority {
public static String DEFAULT_NAME = "EventBased";
EventService eventService;
String[] processNames;
String eventName;
Integer retry;
Long timeoutMillis;
String onError;
int priority = Priority.SPECIFIC;
public EventBasedProcessHandler(EventService eventService, ObjectManager objectManager, ObjectProcessManager objectProcessManager,
ObjectMetaDataManager objectMetaDataManager) {
this();
this.eventService = eventService;
this.objectManager = objectManager;
this.objectProcessManager = objectProcessManager;
this.objectMetaDataManager = objectMetaDataManager;
}
public EventBasedProcessHandler() {
if (this.getClass() == EventBasedProcessHandler.class) {
setName(DEFAULT_NAME);
}
}
@Override
public String[] getProcessNames() {
if (DEFAULT_NAME.equals(getName())) {
return new String[0];
}
if (processNames == null) {
return new String[] { NamedUtils.toDotSeparated(getName()) };
}
return processNames;
}
@Override
public HandlerResult handle(ProcessState state, ProcessInstance process) {
final Object resource = state.getResource();
String type = objectManager.getType(resource);
if (type == null) {
type = resource.getClass().getName();
}
String idString = null;
Object id = ObjectUtils.getId(resource);
if (id != null) {
idString = id.toString();
}
String eventName = getEventName() == null ? process.getName() : getEventName();
Event request = EventVO.newEvent(eventName).withResourceId(idString).withResourceType(type).withData(state.getData());
EventCallOptions options = new EventCallOptions(retry, timeoutMillis);
options.setProgressIsKeepAlive(true);
options.setProgress(new EventProgress() {
@Override
public void progress(Event event) {
Map<String, Object> data = new HashMap<String, Object>();
String transitioning = event.getTransitioningMessage();
Integer progress = event.getTransitioningProgress();
if (transitioning != null) {
data.put(ObjectMetaDataManager.TRANSITIONING_MESSAGE_FIELD, transitioning);
}
if (progress != null) {
data.put(ObjectMetaDataManager.TRANSITIONING_PROGRESS_FIELD, progress);
}
if (data.size() > 0) {
DataChangedException dce = null;
for (int i = 0; i < 3 ; i++) {
try {
Object reloaded = objectManager.reload(resource);
objectManager.setFields(reloaded, data);
dce = null;
break;
} catch (DataChangedException e) {
dce = e;
}
}
if (dce != null) {
throw dce;
}
}
}
});
try {
Event response = eventService.callSync(request, options);
return postEvent(state, process, CollectionUtils.toMap(response.getData()));
} catch (ExecutionException e) {
if (!StringUtils.isEmpty(getOnError())) {
objectProcessManager.scheduleProcessInstance(getOnError(), state.getResource(), state.getData());
e.setResources(state.getResource());
}
throw e;
}
}
protected HandlerResult postEvent(ProcessState state, ProcessInstance process, Map<Object, Object> data) {
return new HandlerResult(data);
}
public EventService getEventService() {
return eventService;
}
@Inject
public void setEventService(EventService eventService) {
this.eventService = eventService;
}
@Override
public ObjectManager getObjectManager() {
return objectManager;
}
@Override
@Inject
public void setObjectManager(ObjectManager objectManager) {
this.objectManager = objectManager;
}
public void setProcessNames(String[] processNames) {
this.processNames = processNames;
}
public String getEventName() {
return eventName;
}
public void setEventName(String eventName) {
this.eventName = eventName;
}
public Integer getRetry() {
return retry;
}
public void setRetry(Integer retry) {
this.retry = retry;
}
public Long getTimeoutMillis() {
return timeoutMillis;
}
public void setTimeoutMillis(Long timeoutMillis) {
this.timeoutMillis = timeoutMillis;
}
@Override
public int getPriority() {
return priority;
}
public void setPriority(int priority) {
this.priority = priority;
}
public String getOnError() {
return this.onError;
}
public void setOnError(String onError) {
this.onError = onError;
}
}
| |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Populates data fields from Android contacts profile API (i.e. "me" contact).
package org.chromium.components.browser.autofill;
import android.app.Activity;
import android.content.ContentProviderOperation;
import android.content.ContentResolver;
import android.content.Context;
import android.content.OperationApplicationException;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.database.DatabaseUtils;
import android.net.Uri;
import android.os.Bundle;
import android.os.RemoteException;
import android.provider.ContactsContract.CommonDataKinds.Phone;
import android.provider.ContactsContract.Profile;
import android.provider.ContactsContract;
import android.util.Log;
import android.view.View.OnClickListener;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
import org.chromium.base.CalledByNative;
import org.chromium.base.JNINamespace;
import java.util.ArrayList;
/**
 * Loads user profile information stored under the "Me" contact.
 * Requires permissions: READ_CONTACTS and READ_PROFILE.
 */
@JNINamespace("autofill")
public class PersonalAutofillPopulator {
    /**
     * SQL query definitions for obtaining specific profile information.
     */
    private abstract static class ProfileQuery {
        // Data rows of the device owner's "Me" profile contact.
        Uri profileDataUri = Uri.withAppendedPath(
            ContactsContract.Profile.CONTENT_URI,
            ContactsContract.Contacts.Data.CONTENT_DIRECTORY
        );
        /** Columns to fetch; their order must match the per-query index constants. */
        public abstract String[] projection();
        /** MIME type used to restrict profile data rows to this query's kind. */
        public abstract String mimeType();
    }
    private static class EmailProfileQuery extends ProfileQuery {
        private static final int EMAIL_ADDRESS = 0;
        @Override
        public String[] projection() {
            return new String[] {
                ContactsContract.CommonDataKinds.Email.ADDRESS,
            };
        }
        @Override
        public String mimeType() {
            return ContactsContract.CommonDataKinds.Email.CONTENT_ITEM_TYPE;
        }
    }
    private static class PhoneProfileQuery extends ProfileQuery {
        private static final int NUMBER = 0;
        @Override
        public String[] projection() {
            return new String[] {
                ContactsContract.CommonDataKinds.Phone.NUMBER,
            };
        }
        @Override
        public String mimeType() {
            return ContactsContract.CommonDataKinds.Phone.CONTENT_ITEM_TYPE;
        }
    }
    private static class AddressProfileQuery extends ProfileQuery {
        private static final int STREET = 0;
        private static final int POBOX = 1;
        private static final int NEIGHBORHOOD = 2;
        private static final int CITY = 3;
        private static final int REGION = 4;
        private static final int POSTALCODE = 5;
        private static final int COUNTRY = 6;
        @Override
        public String[] projection() {
            return new String[] {
                ContactsContract.CommonDataKinds.StructuredPostal.STREET,
                ContactsContract.CommonDataKinds.StructuredPostal.POBOX,
                ContactsContract.CommonDataKinds.StructuredPostal.NEIGHBORHOOD,
                ContactsContract.CommonDataKinds.StructuredPostal.CITY,
                ContactsContract.CommonDataKinds.StructuredPostal.REGION,
                ContactsContract.CommonDataKinds.StructuredPostal.POSTCODE,
                ContactsContract.CommonDataKinds.StructuredPostal.COUNTRY,
            };
        }
        @Override
        public String mimeType() {
            return ContactsContract.CommonDataKinds.StructuredPostal.CONTENT_ITEM_TYPE;
        }
    }
    private static class NameProfileQuery extends ProfileQuery {
        private static final int GIVEN_NAME = 0;
        private static final int MIDDLE_NAME = 1;
        private static final int FAMILY_NAME = 2;
        private static final int SUFFIX = 3;
        @Override
        public String[] projection() {
            return new String[] {
                ContactsContract.CommonDataKinds.StructuredName.GIVEN_NAME,
                ContactsContract.CommonDataKinds.StructuredName.MIDDLE_NAME,
                ContactsContract.CommonDataKinds.StructuredName.FAMILY_NAME,
                ContactsContract.CommonDataKinds.StructuredName.SUFFIX
            };
        }
        @Override
        public String mimeType() {
            return ContactsContract.CommonDataKinds.StructuredName.CONTENT_ITEM_TYPE;
        }
    }
    /**
     * Takes a query object, transforms into actual query and returns cursor.
     * Primary contact values will be first.
     * Note: ContentResolver.query may return null (e.g. provider unavailable).
     */
    private Cursor cursorFromProfileQuery(ProfileQuery query, ContentResolver contentResolver) {
        String sortDescriptor = ContactsContract.Contacts.Data.IS_PRIMARY + " DESC";
        return contentResolver.query(
            query.profileDataUri,
            query.projection(),
            ContactsContract.Contacts.Data.MIMETYPE + " = ?",
            new String[]{query.mimeType()},
            sortDescriptor
        );
    }
    // Extracted data variables.
    private String[] mEmailAddresses;
    private String mGivenName;
    private String mMiddleName;
    private String mFamilyName;
    private String mSuffix;
    private String mPobox;
    private String mStreet;
    private String mNeighborhood;
    private String mCity;
    private String mRegion;
    private String mCountry;
    private String mPostalCode;
    private String[] mPhoneNumbers;
    private boolean mHasPermissions;
    /**
     * Constructor
     * @param context a valid android context reference
     */
    PersonalAutofillPopulator(Context context) {
        mHasPermissions = hasPermissions(context);
        if (mHasPermissions) {
            ContentResolver contentResolver = context.getContentResolver();
            populateName(contentResolver);
            populateEmail(contentResolver);
            populateAddress(contentResolver);
            populatePhone(contentResolver);
        }
    }
    // Check if the user has granted permissions.
    private boolean hasPermissions(Context context) {
        String [] permissions = {
            "android.permission.READ_CONTACTS",
            "android.permission.READ_PROFILE"
        };
        for (String permission : permissions) {
            int res = context.checkCallingOrSelfPermission(permission);
            if (res != PackageManager.PERMISSION_GRANTED) return false;
        }
        return true;
    }
    // Populating data fields. Each populate* method guards against a null cursor
    // (ContentResolver.query may return null) and closes the cursor in a finally
    // block so it is not leaked if a read throws.
    private void populateName(ContentResolver contentResolver) {
        NameProfileQuery nameProfileQuery = new NameProfileQuery();
        Cursor nameCursor = cursorFromProfileQuery(nameProfileQuery, contentResolver);
        if (nameCursor == null) return;
        try {
            if (nameCursor.moveToNext()) {
                mGivenName = nameCursor.getString(NameProfileQuery.GIVEN_NAME);
                mMiddleName = nameCursor.getString(NameProfileQuery.MIDDLE_NAME);
                mFamilyName = nameCursor.getString(NameProfileQuery.FAMILY_NAME);
                mSuffix = nameCursor.getString(NameProfileQuery.SUFFIX);
            }
        } finally {
            nameCursor.close();
        }
    }
    private void populateEmail(ContentResolver contentResolver) {
        EmailProfileQuery emailProfileQuery = new EmailProfileQuery();
        Cursor emailCursor = cursorFromProfileQuery(emailProfileQuery, contentResolver);
        if (emailCursor == null) {
            mEmailAddresses = new String[0];
            return;
        }
        try {
            mEmailAddresses = new String[emailCursor.getCount()];
            for (int i = 0; emailCursor.moveToNext(); i++) {
                mEmailAddresses[i] = emailCursor.getString(EmailProfileQuery.EMAIL_ADDRESS);
            }
        } finally {
            emailCursor.close();
        }
    }
    private void populateAddress(ContentResolver contentResolver) {
        AddressProfileQuery addressProfileQuery = new AddressProfileQuery();
        Cursor addressCursor = cursorFromProfileQuery(addressProfileQuery, contentResolver);
        if (addressCursor == null) return;
        try {
            if (addressCursor.moveToNext()) {
                mPobox = addressCursor.getString(AddressProfileQuery.POBOX);
                mStreet = addressCursor.getString(AddressProfileQuery.STREET);
                mNeighborhood = addressCursor.getString(AddressProfileQuery.NEIGHBORHOOD);
                mCity = addressCursor.getString(AddressProfileQuery.CITY);
                mRegion = addressCursor.getString(AddressProfileQuery.REGION);
                mPostalCode = addressCursor.getString(AddressProfileQuery.POSTALCODE);
                mCountry = addressCursor.getString(AddressProfileQuery.COUNTRY);
            }
        } finally {
            addressCursor.close();
        }
    }
    private void populatePhone(ContentResolver contentResolver) {
        PhoneProfileQuery phoneProfileQuery = new PhoneProfileQuery();
        Cursor phoneCursor = cursorFromProfileQuery(phoneProfileQuery, contentResolver);
        if (phoneCursor == null) {
            mPhoneNumbers = new String[0];
            return;
        }
        try {
            mPhoneNumbers = new String[phoneCursor.getCount()];
            for (int i = 0; phoneCursor.moveToNext(); i++) {
                mPhoneNumbers[i] = phoneCursor.getString(PhoneProfileQuery.NUMBER);
            }
        } finally {
            phoneCursor.close();
        }
    }
    /**
     * Static factory method for instance creation.
     * @param context valid Android context.
     * @return PersonalAutofillPopulator new instance of PersonalAutofillPopulator.
     */
    @CalledByNative
    static PersonalAutofillPopulator create(Context context) {
        return new PersonalAutofillPopulator(context);
    }
    @CalledByNative
    private String getFirstName() {
        return mGivenName;
    }
    @CalledByNative
    private String getLastName() {
        return mFamilyName;
    }
    @CalledByNative
    private String getMiddleName() {
        return mMiddleName;
    }
    @CalledByNative
    private String getSuffix() {
        return mSuffix;
    }
    @CalledByNative
    private String[] getEmailAddresses() {
        return mEmailAddresses;
    }
    @CalledByNative
    private String getStreet() {
        return mStreet;
    }
    @CalledByNative
    private String getPobox() {
        return mPobox;
    }
    @CalledByNative
    private String getNeighborhood() {
        return mNeighborhood;
    }
    @CalledByNative
    private String getCity() {
        return mCity;
    }
    @CalledByNative
    private String getRegion() {
        return mRegion;
    }
    @CalledByNative
    private String getPostalCode() {
        return mPostalCode;
    }
    @CalledByNative
    private String getCountry() {
        return mCountry;
    }
    @CalledByNative
    private String[] getPhoneNumbers() {
        return mPhoneNumbers;
    }
    @CalledByNative
    private boolean getHasPermissions() {
        return mHasPermissions;
    }
}
| |
/*
* Copyright 2014 Black Pepper Software
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.co.blackpepper.support.selenium;
import java.util.Collections;
import java.util.List;
import org.junit.Test;
import org.mockito.stubbing.OngoingStubbing;
import org.openqa.selenium.Alert;
import org.openqa.selenium.By;
import org.openqa.selenium.NoAlertPresentException;
import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.SearchContext;
import org.openqa.selenium.TimeoutException;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebDriver.TargetLocator;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.ui.ExpectedCondition;
import org.openqa.selenium.support.ui.WebDriverWait;
import static java.util.Arrays.asList;
import static java.util.Collections.singleton;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static uk.co.blackpepper.support.selenium.WebDriverUtils.acceptAlert;
import static uk.co.blackpepper.support.selenium.WebDriverUtils.dismissAlert;
import static uk.co.blackpepper.support.selenium.WebDriverUtils.getText;
import static uk.co.blackpepper.support.selenium.WebDriverUtils.quietFindElement;
import static uk.co.blackpepper.support.selenium.WebDriverUtils.safeFindElement;
import static uk.co.blackpepper.support.selenium.WebDriverUtils.until;
public class WebDriverUtilsTest {
@Test
public void safeFindElementWhenElementFoundReturnsElement() {
    // A context that can resolve the locator should have its element passed through.
    SearchContext context = mock(SearchContext.class);
    WebElement found = mock(WebElement.class);
    when(context.findElement(By.id("x"))).thenReturn(found);
    assertThat(safeFindElement(context, By.id("x")), is(found));
}
@Test
public void safeFindElementWhenElementNotFoundReturnsNullElement() {
    // A failed lookup degrades to a NullWebElement rather than propagating
    // the NoSuchElementException.
    SearchContext context = mock(SearchContext.class);
    when(context.findElement(By.id("x"))).thenThrow(new NoSuchElementException("x"));
    WebElement result = safeFindElement(context, By.id("x"));
    assertThat(result, is(instanceOf(NullWebElement.class)));
}
/**
 * @deprecated Tests deprecated method.
 */
@Test
@Deprecated
public void quietFindElementWhenElementFoundReturnsElement() {
    // The deprecated variant also hands the element back on a successful lookup.
    SearchContext context = mock(SearchContext.class);
    WebElement found = mock(WebElement.class);
    when(context.findElement(By.id("x"))).thenReturn(found);
    assertThat(quietFindElement(context, By.id("x")), is(found));
}
/**
 * @deprecated Tests deprecated method.
 */
@Test
@Deprecated
public void quietFindElementWhenElementNotFoundReturnsNull() {
    // Unlike safeFindElement, the deprecated variant maps a failed lookup to null.
    SearchContext context = mock(SearchContext.class);
    when(context.findElement(By.id("x"))).thenThrow(new NoSuchElementException("x"));
    assertThat(quietFindElement(context, By.id("x")), is(nullValue()));
}
@Test
public void untilWithConditionSatisfiedReturnsTrue() {
    // A wait that yields a value means the condition was met within the timeout.
    WebDriverWait wait = mock(WebDriverWait.class);
    ExpectedCondition<Object> condition = mock(ExpectedCondition.class);
    when(wait.until(condition)).thenReturn(new Object());
    assertThat(until(wait, condition), is(true));
}
@Test
public void untilWithConditionUnsatisfiedReturnsFalse() {
ExpectedCondition<Object> condition = mock(ExpectedCondition.class);
WebDriverWait wait = mock(WebDriverWait.class);
when(wait.until(condition)).thenThrow(new TimeoutException());
boolean actual = until(wait, condition);
assertThat(actual, is(false));
}
@Test
public void getTextWithNoElementsReturnsEmptyList() {
List<String> actual = getText(Collections.<WebElement>emptySet());
assertThat(actual, is(empty()));
}
@Test
public void getTextWithElementReturnsText() {
WebElement element = newElementWithText("x");
List<String> actual = getText(singleton(element));
assertThat(actual, contains("x"));
}
@Test
public void getTextWithElementsReturnsText() {
WebElement element1 = newElementWithText("x");
WebElement element2 = newElementWithText("y");
List<String> actual = getText(asList(element1, element2));
assertThat(actual, contains("x", "y"));
}
@Test
public void acceptAlertAcceptsAlert() {
WebDriver driver = mock(WebDriver.class);
Alert alert = mock(Alert.class);
whenSwitchToAlert(driver).thenReturn(alert);
acceptAlert(driver);
verify(alert).accept();
}
@Test(expected = NoAlertPresentException.class)
public void acceptAlertWhenNoAlertThrowsException() {
WebDriver driver = mock(WebDriver.class);
whenSwitchToAlert(driver).thenThrow(new NoAlertPresentException());
acceptAlert(driver);
}
@Test(expected = IllegalStateException.class)
public void acceptAlertWhenWebDriverExceptionThrowsException() {
Alert alert = mock(Alert.class);
doThrow(new WebDriverException()).when(alert).accept();
WebDriver driver = mock(WebDriver.class);
whenSwitchToAlert(driver).thenReturn(alert);
acceptAlert(driver);
}
@Test
public void dismissAlertDismissesAlert() {
WebDriver driver = mock(WebDriver.class);
Alert alert = mock(Alert.class);
whenSwitchToAlert(driver).thenReturn(alert);
dismissAlert(driver);
verify(alert).dismiss();
}
@Test(expected = NoAlertPresentException.class)
public void dismissAlertWhenNoAlertThrowsException() {
WebDriver driver = mock(WebDriver.class);
whenSwitchToAlert(driver).thenThrow(new NoAlertPresentException());
dismissAlert(driver);
}
@Test(expected = IllegalStateException.class)
public void dismissAlertWhenWebDriverExceptionThrowsException() {
Alert alert = mock(Alert.class);
doThrow(new WebDriverException()).when(alert).dismiss();
WebDriver driver = mock(WebDriver.class);
whenSwitchToAlert(driver).thenReturn(alert);
dismissAlert(driver);
}
private static WebElement newElementWithText(String text) {
WebElement element = mock(WebElement.class);
when(element.getText()).thenReturn(text);
return element;
}
private static OngoingStubbing<Alert> whenSwitchToAlert(WebDriver driver) {
TargetLocator targetLocator = mock(TargetLocator.class);
when(driver.switchTo()).thenReturn(targetLocator);
return when(targetLocator.alert());
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.snapshot;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.io.HLogLink;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.ExportSnapshotException;
import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotReferenceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/**
 * Export the specified snapshot to a given FileSystem.
 *
 * The .snapshot/name folder is copied to the destination cluster
 * and then all the hfiles/hlogs are copied using a Map-Reduce Job in the .archive/ location.
 * When everything is done, the second cluster can restore the snapshot.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public final class ExportSnapshot extends Configured implements Tool {
  private static final Log LOG = LogFactory.getLog(ExportSnapshot.class);

  // Configuration keys used to pass the user's options down to the mapper tasks.
  private static final String CONF_TMP_DIR = "hbase.tmp.dir";
  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";
  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";
  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";
  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";
  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";
  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";
  private static final String INPUT_FOLDER_PREFIX = "export-files.";

  // Export Map-Reduce Counters, to keep track of the progress
  public enum Counter { MISSING_FILES, COPY_FAILED, BYTES_EXPECTED, BYTES_COPIED }

  /**
   * Mapper that copies a single source file per input record to the destination
   * cluster. The input key is the source path; nothing is emitted — progress and
   * failures are tracked through {@link Counter}s.
   */
  private static class ExportMapper extends Mapper<Text, NullWritable, NullWritable, NullWritable> {
    /** Update the task status (and the BYTES_COPIED counter) roughly every 1MB copied. */
    final static int REPORT_SIZE = 1 * 1024 * 1024;
    /** Copy buffer size. */
    final static int BUFFER_SIZE = 64 * 1024;

    private boolean verifyChecksum;
    private String filesGroup;
    private String filesUser;
    private short filesMode;

    private FileSystem outputFs;
    private Path outputArchive;
    private Path outputRoot;

    private FileSystem inputFs;
    private Path inputArchive;
    private Path inputRoot;

    @Override
    public void setup(Context context) {
      Configuration conf = context.getConfiguration();
      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);

      filesGroup = conf.get(CONF_FILES_GROUP);
      filesUser = conf.get(CONF_FILES_USER);
      // 0 means "preserve the source permission" (see preserveAttributes()).
      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);
      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));
      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));

      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);
      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);

      try {
        inputFs = FileSystem.get(inputRoot.toUri(), conf);
      } catch (IOException e) {
        throw new RuntimeException("Could not get the input FileSystem with root=" + inputRoot, e);
      }

      try {
        outputFs = FileSystem.get(outputRoot.toUri(), conf);
      } catch (IOException e) {
        throw new RuntimeException("Could not get the output FileSystem with root=" + outputRoot, e);
      }
    }

    @Override
    public void map(Text key, NullWritable value, Context context)
        throws InterruptedException, IOException {
      Path inputPath = new Path(key.toString());
      Path outputPath = getOutputPath(inputPath);

      LOG.info("copy file input=" + inputPath + " output=" + outputPath);
      // copyFile() reports failures via counters; only log on success.
      if (copyFile(context, inputPath, outputPath)) {
        LOG.info("copy completed for input=" + inputPath + " output=" + outputPath);
      }
    }

    /**
     * Returns the location where the inputPath will be copied.
     *  - hfiles are encoded as hfile links hfile-region-table
     *  - logs are encoded as serverName/logName
     */
    private Path getOutputPath(final Path inputPath) throws IOException {
      Path path;
      if (HFileLink.isHFileLink(inputPath)) {
        // Decode the link name back into the table/region/family/hfile layout.
        String family = inputPath.getParent().getName();
        String table = HFileLink.getReferencedTableName(inputPath.getName());
        String region = HFileLink.getReferencedRegionName(inputPath.getName());
        String hfile = HFileLink.getReferencedHFileName(inputPath.getName());
        path = new Path(table, new Path(region, new Path(family, hfile)));
      } else if (isHLogLinkPath(inputPath)) {
        String logName = inputPath.getName();
        path = new Path(new Path(outputRoot, HConstants.HREGION_OLDLOGDIR_NAME), logName);
      } else {
        path = inputPath;
      }
      return new Path(outputArchive, path);
    }

    /**
     * Copies inputPath to outputPath, skipping the copy when an identical file
     * already exists at the destination, and preserving the file attributes.
     *
     * @return true if the destination ends up with a valid copy, false otherwise
     *         (the failure is also recorded in the MISSING_FILES/COPY_FAILED counters)
     */
    private boolean copyFile(final Context context, final Path inputPath, final Path outputPath)
        throws IOException {
      FSDataInputStream in = openSourceFile(inputPath);
      if (in == null) {
        context.getCounter(Counter.MISSING_FILES).increment(1);
        return false;
      }

      try {
        // Verify if the input file exists
        FileStatus inputStat = getFileStatus(inputFs, inputPath);
        if (inputStat == null) return false;

        // Verify if the output file exists and is the same that we want to copy
        FileStatus outputStat = getFileStatus(outputFs, outputPath);
        if (outputStat != null && sameFile(inputStat, outputStat)) {
          LOG.info("Skip copy " + inputPath + " to " + outputPath + ", same file.");
          return true;
        }

        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());

        // Ensure that the output folder is there and copy the file
        outputFs.mkdirs(outputPath.getParent());
        FSDataOutputStream out = outputFs.create(outputPath, true);
        try {
          if (!copyData(context, inputPath, in, outputPath, out, inputStat.getLen()))
            return false;
        } finally {
          out.close();
        }

        // Preserve attributes
        return preserveAttributes(outputPath, inputStat);
      } finally {
        in.close();
      }
    }

    /**
     * Preserve the files attribute selected by the user copying them from the source file
     *
     * A user-specified mode/owner/group (-chmod/-chuser/-chgroup) takes precedence
     * over the attributes of the source file.
     *
     * @return true if all the requested attributes were applied, false otherwise
     */
    private boolean preserveAttributes(final Path path, final FileStatus refStat) {
      FileStatus stat;
      try {
        stat = outputFs.getFileStatus(path);
      } catch (IOException e) {
        LOG.warn("Unable to get the status for file=" + path);
        return false;
      }

      try {
        // BUGFIX: when a mode was requested with -chmod it must always win.
        // Previously, if the destination already had the requested mode, the
        // else-branch would overwrite it with the source permission.
        if (filesMode > 0) {
          if (stat.getPermission().toShort() != filesMode) {
            outputFs.setPermission(path, new FsPermission(filesMode));
          }
        } else if (!stat.getPermission().equals(refStat.getPermission())) {
          outputFs.setPermission(path, refStat.getPermission());
        }
      } catch (IOException e) {
        LOG.error("Unable to set the permission for file=" + path, e);
        return false;
      }

      try {
        String user = (filesUser != null) ? filesUser : refStat.getOwner();
        String group = (filesGroup != null) ? filesGroup : refStat.getGroup();
        if (!(user.equals(stat.getOwner()) && group.equals(stat.getGroup()))) {
          outputFs.setOwner(path, user, group);
        }
      } catch (IOException e) {
        LOG.error("Unable to set the owner/group for file=" + path, e);
        return false;
      }

      return true;
    }

    /**
     * Streams the content of 'in' to 'out', reporting progress every REPORT_SIZE
     * bytes, and verifies that exactly inputFileSize bytes were written.
     *
     * @return true on a complete copy, false on short copy or I/O error
     *         (failures also increment the COPY_FAILED counter)
     */
    private boolean copyData(final Context context,
        final Path inputPath, final FSDataInputStream in,
        final Path outputPath, final FSDataOutputStream out,
        final long inputFileSize) {
      final String statusMessage = "copied %s/" + StringUtils.humanReadableInt(inputFileSize) +
                                   " (%.3f%%) from " + inputPath + " to " + outputPath;

      try {
        byte[] buffer = new byte[BUFFER_SIZE];
        long totalBytesWritten = 0;
        int reportBytes = 0;
        int bytesRead;

        while ((bytesRead = in.read(buffer)) > 0) {
          out.write(buffer, 0, bytesRead);
          totalBytesWritten += bytesRead;
          reportBytes += bytesRead;

          if (reportBytes >= REPORT_SIZE) {
            context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);
            // BUGFIX: the %.3f%% placeholder expects the overall percentage copied;
            // the old code passed reportBytes/inputFileSize (a per-chunk fraction).
            context.setStatus(String.format(statusMessage,
                              StringUtils.humanReadableInt(totalBytesWritten),
                              (totalBytesWritten/(float)inputFileSize) * 100.0f));
            reportBytes = 0;
          }
        }

        context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);
        context.setStatus(String.format(statusMessage,
                          StringUtils.humanReadableInt(totalBytesWritten),
                          (totalBytesWritten/(float)inputFileSize) * 100.0f));

        // Verify that the written size match
        if (totalBytesWritten != inputFileSize) {
          LOG.error("number of bytes copied not matching copied=" + totalBytesWritten +
                    " expected=" + inputFileSize + " for file=" + inputPath);
          context.getCounter(Counter.COPY_FAILED).increment(1);
          return false;
        }

        return true;
      } catch (IOException e) {
        LOG.error("Error copying " + inputPath + " to " + outputPath, e);
        context.getCounter(Counter.COPY_FAILED).increment(1);
        return false;
      }
    }

    /**
     * Opens the source file, resolving hfile/hlog links to their actual location.
     * @return the open stream, or null if the file cannot be opened
     */
    private FSDataInputStream openSourceFile(final Path path) {
      try {
        if (HFileLink.isHFileLink(path)) {
          return new HFileLink(inputRoot, inputArchive, path).open(inputFs);
        } else if (isHLogLinkPath(path)) {
          String serverName = path.getParent().getName();
          String logName = path.getName();
          return new HLogLink(inputRoot, serverName, logName).open(inputFs);
        }
        return inputFs.open(path);
      } catch (IOException e) {
        LOG.error("Unable to open source file=" + path, e);
        return null;
      }
    }

    /**
     * Returns the FileStatus of the given path, resolving hfile/hlog links,
     * or null if the status cannot be retrieved.
     */
    private FileStatus getFileStatus(final FileSystem fs, final Path path) {
      try {
        if (HFileLink.isHFileLink(path)) {
          HFileLink link = new HFileLink(inputRoot, inputArchive, path);
          return link.getFileStatus(fs);
        } else if (isHLogLinkPath(path)) {
          String serverName = path.getParent().getName();
          String logName = path.getName();
          return new HLogLink(inputRoot, serverName, logName).getFileStatus(fs);
        }
        return fs.getFileStatus(path);
      } catch (IOException e) {
        LOG.warn("Unable to get the status for file=" + path);
        return null;
      }
    }

    /** Returns the file checksum, or null when the FileSystem cannot provide one. */
    private FileChecksum getFileChecksum(final FileSystem fs, final Path path) {
      try {
        return fs.getFileChecksum(path);
      } catch (IOException e) {
        LOG.warn("Unable to get checksum for file=" + path, e);
        return null;
      }
    }

    /**
     * Check if the two files are equal by looking at the file length,
     * and at the checksum (if user has specified the verifyChecksum flag).
     */
    private boolean sameFile(final FileStatus inputStat, final FileStatus outputStat) {
      // Not matching length
      if (inputStat.getLen() != outputStat.getLen()) return false;

      // Mark files as equals, since user asked for no checksum verification
      if (!verifyChecksum) return true;

      // If checksums are not available, files are not the same.
      FileChecksum inChecksum = getFileChecksum(inputFs, inputStat.getPath());
      if (inChecksum == null) return false;

      FileChecksum outChecksum = getFileChecksum(outputFs, outputStat.getPath());
      if (outChecksum == null) return false;

      return inChecksum.equals(outChecksum);
    }

    /**
     * HLog files are encoded as serverName/logName
     * and since all the other files should be in /hbase/table/..path..
     * we can rely on the depth, for now.
     */
    private static boolean isHLogLinkPath(final Path path) {
      return path.depth() == 2;
    }
  }

  /**
   * Extract the list of files (HFiles/HLogs) to copy using Map-Reduce.
   * @return list of files referenced by the snapshot (pair of path and size)
   */
  private List<Pair<Path, Long>> getSnapshotFiles(final FileSystem fs, final Path snapshotDir)
      throws IOException {
    SnapshotDescription snapshotDesc = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);

    final List<Pair<Path, Long>> files = new ArrayList<Pair<Path, Long>>();
    final String table = snapshotDesc.getTable();
    final Configuration conf = getConf();

    // Get snapshot files
    SnapshotReferenceUtil.visitReferencedFiles(fs, snapshotDir,
      new SnapshotReferenceUtil.FileVisitor() {
        public void storeFile (final String region, final String family, final String hfile)
            throws IOException {
          Path path = new Path(family, HFileLink.createHFileLinkName(table, region, hfile));
          long size = new HFileLink(conf, path).getFileStatus(fs).getLen();
          files.add(new Pair<Path, Long>(path, size));
        }

        public void recoveredEdits (final String region, final String logfile)
            throws IOException {
          // copied with the snapshot references
        }

        public void logFile (final String server, final String logfile)
            throws IOException {
          long size = new HLogLink(conf, server, logfile).getFileStatus(fs).getLen();
          files.add(new Pair<Path, Long>(new Path(server, logfile), size));
        }
      });

    return files;
  }

  /**
   * Given a list of file paths and sizes, create around ngroups in as balanced a way as possible.
   * The groups created will have similar amounts of bytes.
   * <p>
   * The algorithm used is pretty straightforward; the file list is sorted by size,
   * and then each group fetch the bigger file available, iterating through groups
   * alternating the direction.
   */
  static List<List<Path>> getBalancedSplits(final List<Pair<Path, Long>> files, int ngroups) {
    // Sort files by size, from small to big
    Collections.sort(files, new Comparator<Pair<Path, Long>>() {
      public int compare(Pair<Path, Long> a, Pair<Path, Long> b) {
        long r = a.getSecond() - b.getSecond();
        return (r < 0) ? -1 : ((r > 0) ? 1 : 0);
      }
    });

    // create balanced groups: repeatedly hand the biggest remaining file to the
    // next group, sweeping back and forth across the groups ("boustrophedon").
    List<List<Path>> fileGroups = new LinkedList<List<Path>>();
    long[] sizeGroups = new long[ngroups];
    int hi = files.size() - 1;
    int lo = 0;

    List<Path> group;
    int dir = 1;
    int g = 0;

    while (hi >= lo) {
      if (g == fileGroups.size()) {
        group = new LinkedList<Path>();
        fileGroups.add(group);
      } else {
        group = fileGroups.get(g);
      }

      Pair<Path, Long> fileInfo = files.get(hi--);

      // add the hi one
      sizeGroups[g] += fileInfo.getSecond();
      group.add(fileInfo.getFirst());

      // change direction when at the end or the beginning
      g += dir;
      if (g == ngroups) {
        dir = -1;
        g = ngroups - 1;
      } else if (g < 0) {
        dir = 1;
        g = 0;
      }
    }

    if (LOG.isDebugEnabled()) {
      for (int i = 0; i < sizeGroups.length; ++i) {
        LOG.debug("export split=" + i + " size=" + StringUtils.humanReadableInt(sizeGroups[i]));
      }
    }

    return fileGroups;
  }

  /** Creates a unique staging folder for the MR input files, under hbase.tmp.dir. */
  private static Path getInputFolderPath(final FileSystem fs, final Configuration conf)
      throws IOException, InterruptedException {
    String stagingName = "exportSnapshot-" + EnvironmentEdgeManager.currentTimeMillis();
    Path stagingDir = new Path(conf.get(CONF_TMP_DIR), stagingName);
    fs.mkdirs(stagingDir);
    return new Path(stagingDir, INPUT_FOLDER_PREFIX +
      String.valueOf(EnvironmentEdgeManager.currentTimeMillis()));
  }

  /**
   * Create the input files, with the path to copy, for the MR job.
   * Each input files contains n files, and each input file has a similar amount data to copy.
   * The number of input files created are based on the number of mappers provided as argument
   * and the number of the files to copy.
   */
  private static Path[] createInputFiles(final Configuration conf,
      final List<Pair<Path, Long>> snapshotFiles, int mappers)
      throws IOException, InterruptedException {
    FileSystem fs = FileSystem.get(conf);
    Path inputFolderPath = getInputFolderPath(fs, conf);
    LOG.debug("Input folder location: " + inputFolderPath);

    List<List<Path>> splits = getBalancedSplits(snapshotFiles, mappers);
    Path[] inputFiles = new Path[splits.size()];

    Text key = new Text();
    for (int i = 0; i < inputFiles.length; i++) {
      List<Path> files = splits.get(i);
      inputFiles[i] = new Path(inputFolderPath, String.format("export-%d.seq", i));
      SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, inputFiles[i],
        Text.class, NullWritable.class);
      LOG.debug("Input split: " + i);
      try {
        for (Path file: files) {
          LOG.debug(file.toString());
          key.set(file.toString());
          writer.append(key, NullWritable.get());
        }
      } finally {
        writer.close();
      }
    }

    return inputFiles;
  }

  /**
   * Run Map-Reduce Job to perform the files copy.
   * @return true if the job completed successfully
   */
  private boolean runCopyJob(final Path inputRoot, final Path outputRoot,
      final List<Pair<Path, Long>> snapshotFiles, final boolean verifyChecksum,
      final String filesUser, final String filesGroup, final int filesMode,
      final int mappers) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = getConf();
    if (filesGroup != null) conf.set(CONF_FILES_GROUP, filesGroup);
    if (filesUser != null) conf.set(CONF_FILES_USER, filesUser);
    conf.setInt(CONF_FILES_MODE, filesMode);
    conf.setBoolean(CONF_CHECKSUM_VERIFY, verifyChecksum);
    conf.set(CONF_OUTPUT_ROOT, outputRoot.toString());
    conf.set(CONF_INPUT_ROOT, inputRoot.toString());
    conf.setInt("mapreduce.job.maps", mappers);

    // Speculative execution would copy the same file twice from two tasks;
    // disable it under both the old and the new property names.
    conf.setBoolean("mapreduce.map.speculative", false);
    conf.setBoolean("mapreduce.reduce.speculative", false);
    conf.setBoolean("mapred.map.tasks.speculative.execution", false);
    conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);

    Job job = new Job(conf);
    job.setJobName("ExportSnapshot");
    job.setJarByClass(ExportSnapshot.class);
    job.setMapperClass(ExportMapper.class);
    job.setInputFormatClass(SequenceFileInputFormat.class);
    job.setOutputFormatClass(NullOutputFormat.class);
    job.setNumReduceTasks(0);
    for (Path path: createInputFiles(conf, snapshotFiles, mappers)) {
      LOG.debug("Add Input Path=" + path);
      SequenceFileInputFormat.addInputPath(job, path);
    }

    return job.waitForCompletion(true);
  }

  /**
   * Execute the export snapshot by copying the snapshot metadata, hfiles and hlogs.
   * @return 0 on success, and != 0 upon failure.
   */
  @Override
  public int run(String[] args) throws Exception {
    boolean verifyChecksum = true;
    String snapshotName = null;
    String filesGroup = null;
    String filesUser = null;
    Path outputRoot = null;
    int filesMode = 0;
    int mappers = getConf().getInt("mapreduce.job.maps", 1);

    // Process command line args
    for (int i = 0; i < args.length; i++) {
      String cmd = args[i];
      try {
        if (cmd.equals("-snapshot")) {
          snapshotName = args[++i];
        } else if (cmd.equals("-copy-to")) {
          outputRoot = new Path(args[++i]);
        } else if (cmd.equals("-no-checksum-verify")) {
          verifyChecksum = false;
        } else if (cmd.equals("-mappers")) {
          mappers = Integer.parseInt(args[++i]);
        } else if (cmd.equals("-chuser")) {
          filesUser = args[++i];
        } else if (cmd.equals("-chgroup")) {
          filesGroup = args[++i];
        } else if (cmd.equals("-chmod")) {
          // octal mode, like chmod(1)
          filesMode = Integer.parseInt(args[++i], 8);
        } else if (cmd.equals("-h") || cmd.equals("--help")) {
          printUsageAndExit();
        } else {
          System.err.println("UNEXPECTED: " + cmd);
          printUsageAndExit();
        }
      } catch (Exception e) {
        // missing argument value or unparsable number
        printUsageAndExit();
      }
    }

    // Check user options (printUsageAndExit() never returns; it calls System.exit)
    if (snapshotName == null) {
      System.err.println("Snapshot name not provided.");
      printUsageAndExit();
    }

    if (outputRoot == null) {
      System.err.println("Destination file-system not provided.");
      printUsageAndExit();
    }

    Configuration conf = getConf();
    Path inputRoot = FSUtils.getRootDir(conf);
    FileSystem inputFs = FileSystem.get(conf);
    FileSystem outputFs = FileSystem.get(outputRoot.toUri(), conf);

    Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, inputRoot);
    Path snapshotTmpDir = SnapshotDescriptionUtils.getWorkingSnapshotDir(snapshotName, outputRoot);
    Path outputSnapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, outputRoot);

    // Check if the snapshot already exists
    if (outputFs.exists(outputSnapshotDir)) {
      System.err.println("The snapshot '" + snapshotName +
        "' already exists in the destination: " + outputSnapshotDir);
      return 1;
    }

    // Check if the snapshot already in-progress
    if (outputFs.exists(snapshotTmpDir)) {
      System.err.println("A snapshot with the same name '" + snapshotName + "' is in-progress");
      return 1;
    }

    // Step 0 - Extract snapshot files to copy
    final List<Pair<Path, Long>> files = getSnapshotFiles(inputFs, snapshotDir);

    // Step 1 - Copy fs1:/.snapshot/<snapshot> to  fs2:/.snapshot/.tmp/<snapshot>
    // The snapshot references must be copied before the hfiles otherwise the cleaner
    // will remove them because they are unreferenced.
    try {
      FileUtil.copy(inputFs, snapshotDir, outputFs, snapshotTmpDir, false, false, conf);
    } catch (IOException e) {
      System.err.println("Failed to copy the snapshot directory: from=" + snapshotDir +
        " to=" + snapshotTmpDir);
      e.printStackTrace(System.err);
      return 1;
    }

    // Step 2 - Start MR Job to copy files
    // The snapshot references must be copied before the files otherwise the files gets removed
    // by the HFileArchiver, since they have no references.
    try {
      if (files.size() == 0) {
        LOG.warn("There are 0 store file to be copied. There may be no data in the table.");
      } else {
        if (!runCopyJob(inputRoot, outputRoot, files, verifyChecksum,
            filesUser, filesGroup, filesMode, mappers)) {
          throw new ExportSnapshotException("Snapshot export failed!");
        }
      }

      // Step 3 - Rename fs2:/.snapshot/.tmp/<snapshot> fs2:/.snapshot/<snapshot>
      if (!outputFs.rename(snapshotTmpDir, outputSnapshotDir)) {
        System.err.println("Snapshot export failed!");
        System.err.println("Unable to rename snapshot directory from=" +
                           snapshotTmpDir + " to=" + outputSnapshotDir);
        return 1;
      }

      return 0;
    } catch (Exception e) {
      System.err.println("Snapshot export failed!");
      e.printStackTrace(System.err);
      // best-effort cleanup of the partially-exported snapshot
      outputFs.delete(outputSnapshotDir, true);
      return 1;
    }
  }

  /** Prints the command-line usage and exits with status 1 (never returns). */
  private void printUsageAndExit() {
    System.err.printf("Usage: bin/hbase %s [options]%n", getClass().getName());
    System.err.println(" where [options] are:");
    System.err.println("  -h|--help               Show this help and exit.");
    System.err.println("  -snapshot NAME          Snapshot to export.");
    System.err.println("  -copy-to NAME           Remote destination hdfs://");
    System.err.println("  -no-checksum-verify     Do not verify checksum.");
    System.err.println("  -chuser USERNAME        Change the owner of the files to the specified one.");
    System.err.println("  -chgroup GROUP          Change the group of the files to the specified one.");
    System.err.println("  -chmod MODE             Change the permission of the files to the specified one.");
    System.err.println("  -mappers                Number of mappers to use during the copy (mapreduce.job.maps).");
    System.err.println();
    System.err.println("Examples:");
    System.err.println("  hbase " + getClass() + " \\");
    System.err.println("    -snapshot MySnapshot -copy-to hdfs:///srv2:8082/hbase \\");
    System.err.println("    -chuser MyUser -chgroup MyGroup -chmod 700 -mappers 16");
    System.exit(1);
  }

  /**
   * The guts of the {@link #main} method.
   * Call this method to avoid the {@link #main(String[])} System.exit.
   * @param args
   * @return errCode
   * @throws Exception
   */
  static int innerMain(final Configuration conf, final String [] args) throws Exception {
    return ToolRunner.run(conf, new ExportSnapshot(), args);
  }

  public static void main(String[] args) throws Exception {
    System.exit(innerMain(HBaseConfiguration.create(), args));
  }
}
| |
/**
* Copyright 2017 Eternita LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.frontcache.client;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class FrontCacheCluster {
private Set<FrontCacheClient> fcCluster = new HashSet<FrontCacheClient>();
private final static String DEFAULT_CLUSTER_CONFIG_NAME = "frontcache-cluster.conf";
private final static String SITE_KEY_CONFIG_FILE = "frontcache-site-key.conf";
private final static String DEFAULT_SITE_KEY = "";
private Logger logger = LoggerFactory.getLogger(FrontCacheCluster.class);
private static final int THREAD_AMOUNT = 4;
private ExecutorService executor = Executors.newFixedThreadPool(THREAD_AMOUNT);
private static final long FRONTCACHE_CLIENT_TIMEOUT = 5*1000; // 5 second
public FrontCacheCluster(Collection<String> fcURLSet, String siteKey)
{
for (String url : fcURLSet)
fcCluster.add(new FrontCacheClient(url, siteKey));
}
public FrontCacheCluster(Collection<FrontCacheClient> fcClients)
{
for (FrontCacheClient fcClient : fcClients)
fcCluster.add(fcClient);
}
public FrontCacheCluster()
{
this(DEFAULT_CLUSTER_CONFIG_NAME);
}
public FrontCacheCluster(String configResourceName)
{
Set<String> fcURLSet = loadFrontcacheClusterNodes(configResourceName);
String siteKey = loadSiteKey();
for (String url : fcURLSet)
fcCluster.add(new FrontCacheClient(url, siteKey));
}
public void close()
{
executor.shutdown();
}
private String loadSiteKey()
{
String siteKey = DEFAULT_SITE_KEY;
BufferedReader confReader = null;
InputStream is = null;
try
{
is = FrontCacheCluster.class.getClassLoader().getResourceAsStream(SITE_KEY_CONFIG_FILE);
if (null == is)
{
return siteKey;
}
confReader = new BufferedReader(new InputStreamReader(is, "UTF-8"));
siteKey = confReader.readLine();
if (null == siteKey)
siteKey = "";
} catch (Exception e) {
// TODO: log ...
// throw new RuntimeException("Frontcache cluster nodes can't be loaded from " + configName, e);
} finally {
if (null != confReader)
{
try {
confReader.close();
} catch (IOException e) { }
}
if (null != is)
{
try {
is.close();
} catch (IOException e) { }
}
}
return siteKey;
}
private Set<String> loadFrontcacheClusterNodes(String configName) {
Set<String> fcURLSet = new HashSet<String>();
BufferedReader confReader = null;
InputStream is = null;
try
{
is = FrontCacheCluster.class.getClassLoader().getResourceAsStream(configName);
if (null == is)
throw new RuntimeException("Frontcache cluster nodes can't be loaded from " + configName);
confReader = new BufferedReader(new InputStreamReader(is, "UTF-8"));
String clusterNodeURLStr;
while ((clusterNodeURLStr = confReader.readLine()) != null) {
if (clusterNodeURLStr.trim().startsWith("#")) // handle comments
continue;
if (0 == clusterNodeURLStr.trim().length()) // skip empty
continue;
fcURLSet.add(clusterNodeURLStr);
}
} catch (Exception e) {
logger.error("Frontcache cluster nodes can't be loaded from " + configName, e);
throw new RuntimeException("Frontcache cluster nodes can't be loaded from " + configName, e);
} finally {
if (null != confReader)
{
try {
confReader.close();
} catch (IOException e) { }
}
if (null != is)
{
try {
is.close();
} catch (IOException e) { }
}
}
return fcURLSet;
}
public Set<String> getNodes()
{
Set<String> nodes = new HashSet<String>();
for (FrontCacheClient client : fcCluster)
nodes.add(client.getFrontCacheURL());
return nodes;
}
// /**
// *
// * @return
// */
// public Map<FrontCacheClient, Map<String, String>> getCacheState()
// {
// Map<FrontCacheClient, Map<String, String>> response = new ConcurrentHashMap<FrontCacheClient, Map<String, String>>();
//// fcCluster.forEach(client -> response.put(client.getFrontCacheURL() ,client.getCacheState()));
//
// for (FrontCacheClient client : fcCluster)
// {
// Map<String, String> cacheStatus = client.getCacheState();
// if (null != cacheStatus)
// response.put(client, cacheStatus);
// }
//
// return response;
// }
/**
 * Asks every node in the cluster, in parallel, to invalidate cached entries
 * matching the given filter, waiting up to FRONTCACHE_CLIENT_TIMEOUT ms for
 * each node's answer.
 *
 * @param filter invalidation filter; null means "remove everything"
 * @return map of node name to node response for nodes that answered in time
 */
public Map<String, String> removeFromCache(String filter)
{
    Map<String, String> response = new ConcurrentHashMap<String, String>();
    List<Future<InvalidationResponse>> futureList = new ArrayList<Future<InvalidationResponse>>();
    fcCluster.forEach(client -> futureList.add(executor.submit(new InvalidationCaller(client, filter))));
    futureList.forEach(f ->
    {
        try {
            InvalidationResponse result = f.get(FRONTCACHE_CLIENT_TIMEOUT, TimeUnit.MILLISECONDS);
            if (null != result)
                response.put(result.getName(), result.getResponse());
        } catch (TimeoutException e) {
            f.cancel(true);
            logger.debug("timeout (" + FRONTCACHE_CLIENT_TIMEOUT + ") reached for invalidation. Some cache instances may not be invalidated ");
        } catch (InterruptedException e) {
            // Previously lumped in with timeouts; restore the interrupt flag
            // so callers can observe the interruption.
            f.cancel(true);
            Thread.currentThread().interrupt();
            logger.error("interrupted while waiting for invalidation response", e);
        } catch (ExecutionException e) {
            // Previously logged at debug as a "timeout", hiding real failures
            // and dropping the cause.
            logger.error("invalidation call failed", e);
        }
    }
    );
    return response;
}
/**
 * Convenience wrapper that purges every cached entry on all cluster nodes.
 *
 * @return per-node invalidation responses keyed by node name
 */
public Map<String, String> removeFromCacheAll()
{
    return removeFromCache(null); // a null filter means "match everything"
}
}
/**
 * Immutable value object pairing a frontcache node identifier (its URL) with
 * the raw response body that node returned for an invalidation request.
 */
class InvalidationResponse {

    // Both fields are final: instances are shared across worker threads via
    // Futures in removeFromCache, so immutability guarantees safe publication.
    private final String name;     // frontcache node identifier (URL)
    private final String response; // raw response body from the node

    public InvalidationResponse(String name, String response) {
        this.name = name;
        this.response = response;
    }

    /** @return the frontcache node identifier (URL) */
    public String getName() {
        return name;
    }

    /** @return the node's raw invalidation response */
    public String getResponse() {
        return response;
    }
}
/**
 * Callable task that asks a single frontcache node to invalidate cache
 * entries, pairing the node's URL with the node's raw response.
 */
class InvalidationCaller implements Callable<InvalidationResponse> {

    // final: the task is handed to an executor, so its state must be safely
    // published to the worker thread that runs call().
    private final FrontCacheClient fcClient; // node to invalidate
    private final String filter;             // filter expression; null means "purge all"

    public InvalidationCaller(FrontCacheClient fcClient, String filter) {
        this.fcClient = fcClient;
        this.filter = filter;
    }

    /**
     * Performs the remote invalidation call against the node.
     *
     * @return response wrapper keyed by the node's frontcache URL
     * @throws Exception if the underlying client call fails
     */
    @Override
    public InvalidationResponse call() throws Exception {
        String resp = (null == filter) ? fcClient.removeFromCacheAll() : fcClient.removeFromCache(filter);
        return new InvalidationResponse(fcClient.getFrontCacheURL(), resp);
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/logging/v2/logging_config.proto
package com.google.logging.v2;
/**
* <pre>
* The parameters to `UpdateSink`.
* </pre>
*
* Protobuf type {@code google.logging.v2.UpdateSinkRequest}
*/
public final class UpdateSinkRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.logging.v2.UpdateSinkRequest)
UpdateSinkRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateSinkRequest.newBuilder() to construct.
private UpdateSinkRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor for the default instance; scalar fields get proto3 defaults.
private UpdateSinkRequest() {
sinkName_ = "";
uniqueWriterIdentity_ = false;
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs from `input` until
// end of stream (tag 0). Tag = (field_number << 3) | wire_type; unrecognized
// fields are preserved in unknownFields. Generated code - do not hand-edit.
private UpdateSinkRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
// NOTE: `default` before the value cases is legal Java; switch cases are
// matched by value, not by position, so behavior is unaffected.
default: {
if (!parseUnknownFieldProto3(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: { // field 1 (sink_name), wire type 2 (length-delimited string)
java.lang.String s = input.readStringRequireUtf8();
sinkName_ = s;
break;
}
case 18: { // field 2 (sink), wire type 2: merges into any value already read
com.google.logging.v2.LogSink.Builder subBuilder = null;
if (sink_ != null) {
subBuilder = sink_.toBuilder();
}
sink_ = input.readMessage(com.google.logging.v2.LogSink.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(sink_);
sink_ = subBuilder.buildPartial();
}
break;
}
case 24: { // field 3 (unique_writer_identity), wire type 0 (varint bool)
uniqueWriterIdentity_ = input.readBool();
break;
}
case 34: { // field 4 (update_mask), wire type 2: merges like `sink` above
com.google.protobuf.FieldMask.Builder subBuilder = null;
if (updateMask_ != null) {
subBuilder = updateMask_.toBuilder();
}
updateMask_ = input.readMessage(com.google.protobuf.FieldMask.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(updateMask_);
updateMask_ = subBuilder.buildPartial();
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Always freeze unknown fields, even when parsing fails part-way.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor plumbing used by protobuf reflection. Generated code.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.logging.v2.LoggingConfigProto.internal_static_google_logging_v2_UpdateSinkRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.logging.v2.LoggingConfigProto.internal_static_google_logging_v2_UpdateSinkRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.logging.v2.UpdateSinkRequest.class, com.google.logging.v2.UpdateSinkRequest.Builder.class);
}
public static final int SINK_NAME_FIELD_NUMBER = 1;
// Holds either a java.lang.String or a ByteString; the String form is
// cached lazily by getSinkName(), the ByteString form by getSinkNameBytes().
private volatile java.lang.Object sinkName_;
/**
* <pre>
* Required. The full resource name of the sink to update, including the
* parent resource and the sink identifier:
* "projects/[PROJECT_ID]/sinks/[SINK_ID]"
* "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
* "folders/[FOLDER_ID]/sinks/[SINK_ID]"
* Example: `"projects/my-project-id/sinks/my-sink-id"`.
* </pre>
*
* <code>string sink_name = 1;</code>
*/
public java.lang.String getSinkName() {
java.lang.Object ref = sinkName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
sinkName_ = s;
return s;
}
}
/**
* <pre>
* Required. The full resource name of the sink to update, including the
* parent resource and the sink identifier:
* "projects/[PROJECT_ID]/sinks/[SINK_ID]"
* "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
* "folders/[FOLDER_ID]/sinks/[SINK_ID]"
* Example: `"projects/my-project-id/sinks/my-sink-id"`.
* </pre>
*
* <code>string sink_name = 1;</code>
*/
public com.google.protobuf.ByteString
getSinkNameBytes() {
java.lang.Object ref = sinkName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
sinkName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SINK_FIELD_NUMBER = 2;
// null means "not set" for this message-typed field (proto3 has no hazzer bit).
private com.google.logging.v2.LogSink sink_;
/**
* <pre>
* Required. The updated sink, whose name is the same identifier that appears
* as part of `sink_name`.
* </pre>
*
* <code>.google.logging.v2.LogSink sink = 2;</code>
*/
public boolean hasSink() {
return sink_ != null;
}
/**
* <pre>
* Required. The updated sink, whose name is the same identifier that appears
* as part of `sink_name`.
* </pre>
*
* <code>.google.logging.v2.LogSink sink = 2;</code>
*/
public com.google.logging.v2.LogSink getSink() {
return sink_ == null ? com.google.logging.v2.LogSink.getDefaultInstance() : sink_;
}
/**
* <pre>
* Required. The updated sink, whose name is the same identifier that appears
* as part of `sink_name`.
* </pre>
*
* <code>.google.logging.v2.LogSink sink = 2;</code>
*/
public com.google.logging.v2.LogSinkOrBuilder getSinkOrBuilder() {
return getSink();
}
public static final int UNIQUE_WRITER_IDENTITY_FIELD_NUMBER = 3;
private boolean uniqueWriterIdentity_;
/**
* <pre>
* Optional. See
* [sinks.create](/logging/docs/api/reference/rest/v2/projects.sinks/create)
* for a description of this field. When updating a sink, the effect of this
* field on the value of `writer_identity` in the updated sink depends on both
* the old and new values of this field:
* + If the old and new values of this field are both false or both true,
* then there is no change to the sink's `writer_identity`.
* + If the old value is false and the new value is true, then
* `writer_identity` is changed to a unique service account.
* + It is an error if the old value is true and the new value is
* set to false or defaulted to false.
* </pre>
*
* <code>bool unique_writer_identity = 3;</code>
*/
public boolean getUniqueWriterIdentity() {
return uniqueWriterIdentity_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 4;
// null means "not set"; getUpdateMask() substitutes the default instance.
private com.google.protobuf.FieldMask updateMask_;
/**
* <pre>
* Optional. Field mask that specifies the fields in `sink` that need
* an update. A sink field will be overwritten if, and only if, it is
* in the update mask. `name` and output only fields cannot be updated.
* An empty updateMask is temporarily treated as using the following mask
* for backwards compatibility purposes:
* destination,filter,includeChildren
* At some point in the future, behavior will be removed and specifying an
* empty updateMask will be an error.
* For a detailed `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* Example: `updateMask=filter`.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
public boolean hasUpdateMask() {
return updateMask_ != null;
}
/**
* <pre>
* Optional. Field mask that specifies the fields in `sink` that need
* an update. A sink field will be overwritten if, and only if, it is
* in the update mask. `name` and output only fields cannot be updated.
* An empty updateMask is temporarily treated as using the following mask
* for backwards compatibility purposes:
* destination,filter,includeChildren
* At some point in the future, behavior will be removed and specifying an
* empty updateMask will be an error.
* For a detailed `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* Example: `updateMask=filter`.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
* <pre>
* Optional. Field mask that specifies the fields in `sink` that need
* an update. A sink field will be overwritten if, and only if, it is
* in the update mask. `name` and output only fields cannot be updated.
* An empty updateMask is temporarily treated as using the following mask
* for backwards compatibility purposes:
* destination,filter,includeChildren
* At some point in the future, behavior will be removed and specifying an
* empty updateMask will be an error.
* For a detailed `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* Example: `updateMask=filter`.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return getUpdateMask();
}
private byte memoizedIsInitialized = -1;
// proto3 message with no required fields: always initialized. Result is
// memoized (-1 = unknown, 0 = false, 1 = true).
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes the message to the wire; proto3 skips fields holding their
// default value (empty string / null message / false).
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!getSinkNameBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, sinkName_);
}
if (sink_ != null) {
output.writeMessage(2, getSink());
}
if (uniqueWriterIdentity_ != false) {
output.writeBool(3, uniqueWriterIdentity_);
}
if (updateMask_ != null) {
output.writeMessage(4, getUpdateMask());
}
unknownFields.writeTo(output);
}
// Computes (and memoizes in memoizedSize) the serialized byte size, mirroring
// the field-skipping logic in writeTo.
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getSinkNameBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, sinkName_);
}
if (sink_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getSink());
}
if (uniqueWriterIdentity_ != false) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(3, uniqueWriterIdentity_);
}
if (updateMask_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(4, getUpdateMask());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
// Field-by-field structural equality, including unknown fields. The
// `result = result && ...` chaining is the protoc-generated idiom.
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.logging.v2.UpdateSinkRequest)) {
return super.equals(obj);
}
com.google.logging.v2.UpdateSinkRequest other = (com.google.logging.v2.UpdateSinkRequest) obj;
boolean result = true;
result = result && getSinkName()
.equals(other.getSinkName());
result = result && (hasSink() == other.hasSink());
if (hasSink()) {
result = result && getSink()
.equals(other.getSink());
}
result = result && (getUniqueWriterIdentity()
== other.getUniqueWriterIdentity());
result = result && (hasUpdateMask() == other.hasUpdateMask());
if (hasUpdateMask()) {
result = result && getUpdateMask()
.equals(other.getUpdateMask());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
// Prime-mixed hash over field numbers and values, consistent with equals();
// memoized after first computation.
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + SINK_NAME_FIELD_NUMBER;
hash = (53 * hash) + getSinkName().hashCode();
if (hasSink()) {
hash = (37 * hash) + SINK_FIELD_NUMBER;
hash = (53 * hash) + getSink().hashCode();
}
hash = (37 * hash) + UNIQUE_WRITER_IDENTITY_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
getUniqueWriterIdentity());
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom/parseDelimitedFrom overloads delegating to
// PARSER; byte-array/ByteString variants throw InvalidProtocolBufferException,
// stream variants throw IOException.
public static com.google.logging.v2.UpdateSinkRequest parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.logging.v2.UpdateSinkRequest parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.logging.v2.UpdateSinkRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.logging.v2.UpdateSinkRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.logging.v2.UpdateSinkRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.logging.v2.UpdateSinkRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.logging.v2.UpdateSinkRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.logging.v2.UpdateSinkRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.logging.v2.UpdateSinkRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.logging.v2.UpdateSinkRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.logging.v2.UpdateSinkRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.logging.v2.UpdateSinkRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods. toBuilder() on the default instance returns a
// fresh Builder without a redundant mergeFrom.
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.logging.v2.UpdateSinkRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* The parameters to `UpdateSink`.
* </pre>
*
* Protobuf type {@code google.logging.v2.UpdateSinkRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.logging.v2.UpdateSinkRequest)
com.google.logging.v2.UpdateSinkRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.logging.v2.LoggingConfigProto.internal_static_google_logging_v2_UpdateSinkRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.logging.v2.LoggingConfigProto.internal_static_google_logging_v2_UpdateSinkRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.logging.v2.UpdateSinkRequest.class, com.google.logging.v2.UpdateSinkRequest.Builder.class);
}
// Construct using com.google.logging.v2.UpdateSinkRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Intentionally empty body: this message eagerly creates no nested field
// builders even when alwaysUseFieldBuilders is set.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
// Resets every field to its proto3 default and drops any nested builders.
public Builder clear() {
super.clear();
sinkName_ = "";
if (sinkBuilder_ == null) {
sink_ = null;
} else {
sink_ = null;
sinkBuilder_ = null;
}
uniqueWriterIdentity_ = false;
if (updateMaskBuilder_ == null) {
updateMask_ = null;
} else {
updateMask_ = null;
updateMaskBuilder_ = null;
}
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.logging.v2.LoggingConfigProto.internal_static_google_logging_v2_UpdateSinkRequest_descriptor;
}
public com.google.logging.v2.UpdateSinkRequest getDefaultInstanceForType() {
return com.google.logging.v2.UpdateSinkRequest.getDefaultInstance();
}
// build() enforces isInitialized(); buildPartial() copies current builder
// state into a new message without that check.
public com.google.logging.v2.UpdateSinkRequest build() {
com.google.logging.v2.UpdateSinkRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public com.google.logging.v2.UpdateSinkRequest buildPartial() {
com.google.logging.v2.UpdateSinkRequest result = new com.google.logging.v2.UpdateSinkRequest(this);
result.sinkName_ = sinkName_;
if (sinkBuilder_ == null) {
result.sink_ = sink_;
} else {
result.sink_ = sinkBuilder_.build();
}
result.uniqueWriterIdentity_ = uniqueWriterIdentity_;
if (updateMaskBuilder_ == null) {
result.updateMask_ = updateMask_;
} else {
result.updateMask_ = updateMaskBuilder_.build();
}
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
// Reflection-based field mutators; these just narrow the return type of the
// superclass implementations to Builder.
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
// Dispatches to the typed mergeFrom when `other` is an UpdateSinkRequest.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.logging.v2.UpdateSinkRequest) {
return mergeFrom((com.google.logging.v2.UpdateSinkRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges non-default fields of `other` into this builder (proto3 semantics:
// default-valued scalars in `other` are ignored).
public Builder mergeFrom(com.google.logging.v2.UpdateSinkRequest other) {
if (other == com.google.logging.v2.UpdateSinkRequest.getDefaultInstance()) return this;
if (!other.getSinkName().isEmpty()) {
sinkName_ = other.sinkName_;
onChanged();
}
if (other.hasSink()) {
mergeSink(other.getSink());
}
if (other.getUniqueWriterIdentity() != false) {
setUniqueWriterIdentity(other.getUniqueWriterIdentity());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
// Parses from a stream and merges; on parse failure the partially parsed
// message (if any) is still merged in the finally block before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.logging.v2.UpdateSinkRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.logging.v2.UpdateSinkRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Holds either a String or a ByteString; decoded/encoded form is cached
// lazily, mirroring the message class.
private java.lang.Object sinkName_ = "";
/**
* <pre>
* Required. The full resource name of the sink to update, including the
* parent resource and the sink identifier:
* "projects/[PROJECT_ID]/sinks/[SINK_ID]"
* "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
* "folders/[FOLDER_ID]/sinks/[SINK_ID]"
* Example: `"projects/my-project-id/sinks/my-sink-id"`.
* </pre>
*
* <code>string sink_name = 1;</code>
*/
public java.lang.String getSinkName() {
java.lang.Object ref = sinkName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
sinkName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The full resource name of the sink to update, including the
* parent resource and the sink identifier:
* "projects/[PROJECT_ID]/sinks/[SINK_ID]"
* "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
* "folders/[FOLDER_ID]/sinks/[SINK_ID]"
* Example: `"projects/my-project-id/sinks/my-sink-id"`.
* </pre>
*
* <code>string sink_name = 1;</code>
*/
public com.google.protobuf.ByteString
getSinkNameBytes() {
java.lang.Object ref = sinkName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
sinkName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The full resource name of the sink to update, including the
* parent resource and the sink identifier:
* "projects/[PROJECT_ID]/sinks/[SINK_ID]"
* "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
* "folders/[FOLDER_ID]/sinks/[SINK_ID]"
* Example: `"projects/my-project-id/sinks/my-sink-id"`.
* </pre>
*
* <code>string sink_name = 1;</code>
*/
public Builder setSinkName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
sinkName_ = value;
onChanged();
return this;
}
/**
* <pre>
* Required. The full resource name of the sink to update, including the
* parent resource and the sink identifier:
* "projects/[PROJECT_ID]/sinks/[SINK_ID]"
* "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
* "folders/[FOLDER_ID]/sinks/[SINK_ID]"
* Example: `"projects/my-project-id/sinks/my-sink-id"`.
* </pre>
*
* <code>string sink_name = 1;</code>
*/
public Builder clearSinkName() {
sinkName_ = getDefaultInstance().getSinkName();
onChanged();
return this;
}
/**
* <pre>
* Required. The full resource name of the sink to update, including the
* parent resource and the sink identifier:
* "projects/[PROJECT_ID]/sinks/[SINK_ID]"
* "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
* "folders/[FOLDER_ID]/sinks/[SINK_ID]"
* Example: `"projects/my-project-id/sinks/my-sink-id"`.
* </pre>
*
* <code>string sink_name = 1;</code>
*/
public Builder setSinkNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
sinkName_ = value;
onChanged();
return this;
}
// Either sink_ (plain value) or sinkBuilder_ (nested builder) is active at a
// time; getSinkFieldBuilder() switches to builder mode on demand.
private com.google.logging.v2.LogSink sink_ = null;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.logging.v2.LogSink, com.google.logging.v2.LogSink.Builder, com.google.logging.v2.LogSinkOrBuilder> sinkBuilder_;
/**
* <pre>
* Required. The updated sink, whose name is the same identifier that appears
* as part of `sink_name`.
* </pre>
*
* <code>.google.logging.v2.LogSink sink = 2;</code>
*/
public boolean hasSink() {
return sinkBuilder_ != null || sink_ != null;
}
/**
* <pre>
* Required. The updated sink, whose name is the same identifier that appears
* as part of `sink_name`.
* </pre>
*
* <code>.google.logging.v2.LogSink sink = 2;</code>
*/
public com.google.logging.v2.LogSink getSink() {
if (sinkBuilder_ == null) {
return sink_ == null ? com.google.logging.v2.LogSink.getDefaultInstance() : sink_;
} else {
return sinkBuilder_.getMessage();
}
}
/**
* <pre>
* Required. The updated sink, whose name is the same identifier that appears
* as part of `sink_name`.
* </pre>
*
* <code>.google.logging.v2.LogSink sink = 2;</code>
*/
public Builder setSink(com.google.logging.v2.LogSink value) {
if (sinkBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
sink_ = value;
onChanged();
} else {
sinkBuilder_.setMessage(value);
}
return this;
}
/**
* <pre>
* Required. The updated sink, whose name is the same identifier that appears
* as part of `sink_name`.
* </pre>
*
* <code>.google.logging.v2.LogSink sink = 2;</code>
*/
public Builder setSink(
com.google.logging.v2.LogSink.Builder builderForValue) {
if (sinkBuilder_ == null) {
sink_ = builderForValue.build();
onChanged();
} else {
sinkBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* Required. The updated sink, whose name is the same identifier that appears
* as part of `sink_name`.
* </pre>
*
* <code>.google.logging.v2.LogSink sink = 2;</code>
*/
public Builder mergeSink(com.google.logging.v2.LogSink value) {
if (sinkBuilder_ == null) {
if (sink_ != null) {
sink_ =
com.google.logging.v2.LogSink.newBuilder(sink_).mergeFrom(value).buildPartial();
} else {
sink_ = value;
}
onChanged();
} else {
sinkBuilder_.mergeFrom(value);
}
return this;
}
/**
* <pre>
* Required. The updated sink, whose name is the same identifier that appears
* as part of `sink_name`.
* </pre>
*
* <code>.google.logging.v2.LogSink sink = 2;</code>
*/
public Builder clearSink() {
if (sinkBuilder_ == null) {
sink_ = null;
onChanged();
} else {
sink_ = null;
sinkBuilder_ = null;
}
return this;
}
/**
* <pre>
* Required. The updated sink, whose name is the same identifier that appears
* as part of `sink_name`.
* </pre>
*
* <code>.google.logging.v2.LogSink sink = 2;</code>
*/
public com.google.logging.v2.LogSink.Builder getSinkBuilder() {
onChanged();
return getSinkFieldBuilder().getBuilder();
}
/**
* <pre>
* Required. The updated sink, whose name is the same identifier that appears
* as part of `sink_name`.
* </pre>
*
* <code>.google.logging.v2.LogSink sink = 2;</code>
*/
public com.google.logging.v2.LogSinkOrBuilder getSinkOrBuilder() {
if (sinkBuilder_ != null) {
return sinkBuilder_.getMessageOrBuilder();
} else {
return sink_ == null ?
com.google.logging.v2.LogSink.getDefaultInstance() : sink_;
}
}
/**
* <pre>
* Required. The updated sink, whose name is the same identifier that appears
* as part of `sink_name`.
* </pre>
*
* <code>.google.logging.v2.LogSink sink = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.logging.v2.LogSink, com.google.logging.v2.LogSink.Builder, com.google.logging.v2.LogSinkOrBuilder>
getSinkFieldBuilder() {
if (sinkBuilder_ == null) {
sinkBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.logging.v2.LogSink, com.google.logging.v2.LogSink.Builder, com.google.logging.v2.LogSinkOrBuilder>(
getSink(),
getParentForChildren(),
isClean());
sink_ = null;
}
return sinkBuilder_;
}
private boolean uniqueWriterIdentity_ ;
/**
* <pre>
* Optional. See
* [sinks.create](/logging/docs/api/reference/rest/v2/projects.sinks/create)
* for a description of this field. When updating a sink, the effect of this
* field on the value of `writer_identity` in the updated sink depends on both
* the old and new values of this field:
* + If the old and new values of this field are both false or both true,
* then there is no change to the sink's `writer_identity`.
* + If the old value is false and the new value is true, then
* `writer_identity` is changed to a unique service account.
* + It is an error if the old value is true and the new value is
* set to false or defaulted to false.
* </pre>
*
* <code>bool unique_writer_identity = 3;</code>
*/
public boolean getUniqueWriterIdentity() {
return uniqueWriterIdentity_;
}
/**
* <pre>
* Optional. See
* [sinks.create](/logging/docs/api/reference/rest/v2/projects.sinks/create)
* for a description of this field. When updating a sink, the effect of this
* field on the value of `writer_identity` in the updated sink depends on both
* the old and new values of this field:
* + If the old and new values of this field are both false or both true,
* then there is no change to the sink's `writer_identity`.
* + If the old value is false and the new value is true, then
* `writer_identity` is changed to a unique service account.
* + It is an error if the old value is true and the new value is
* set to false or defaulted to false.
* </pre>
*
* <code>bool unique_writer_identity = 3;</code>
*/
public Builder setUniqueWriterIdentity(boolean value) {
uniqueWriterIdentity_ = value;
onChanged();
return this;
}
/**
* <pre>
* Optional. See
* [sinks.create](/logging/docs/api/reference/rest/v2/projects.sinks/create)
* for a description of this field. When updating a sink, the effect of this
* field on the value of `writer_identity` in the updated sink depends on both
* the old and new values of this field:
* + If the old and new values of this field are both false or both true,
* then there is no change to the sink's `writer_identity`.
* + If the old value is false and the new value is true, then
* `writer_identity` is changed to a unique service account.
* + It is an error if the old value is true and the new value is
* set to false or defaulted to false.
* </pre>
*
* <code>bool unique_writer_identity = 3;</code>
*/
public Builder clearUniqueWriterIdentity() {
uniqueWriterIdentity_ = false;
onChanged();
return this;
}
// Raw message value for update_mask; meaningful only while no nested builder
// exists (the two representations below are mutually exclusive).
private com.google.protobuf.FieldMask updateMask_ = null;
// Lazily-created nested builder for update_mask; once created it owns the
// field's state and updateMask_ is nulled out (see getUpdateMaskFieldBuilder).
private com.google.protobuf.SingleFieldBuilderV3<
    com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_;
/**
* <pre>
* Optional. Field mask that specifies the fields in `sink` that need
* an update. A sink field will be overwritten if, and only if, it is
* in the update mask. `name` and output only fields cannot be updated.
* An empty updateMask is temporarily treated as using the following mask
* for backwards compatibility purposes:
* destination,filter,includeChildren
* At some point in the future, behavior will be removed and specifying an
* empty updateMask will be an error.
* For a detailed `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* Example: `updateMask=filter`.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
// The field counts as "set" if either representation is live: a nested
// builder has been created, or a raw message value has been assigned.
public boolean hasUpdateMask() {
  return updateMaskBuilder_ != null || updateMask_ != null;
}
/**
* <pre>
* Optional. Field mask that specifies the fields in `sink` that need
* an update. A sink field will be overwritten if, and only if, it is
* in the update mask. `name` and output only fields cannot be updated.
* An empty updateMask is temporarily treated as using the following mask
* for backwards compatibility purposes:
* destination,filter,includeChildren
* At some point in the future, behavior will be removed and specifying an
* empty updateMask will be an error.
* For a detailed `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* Example: `updateMask=filter`.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
// Returns the current update_mask, never null: falls back to the FieldMask
// default instance when the field is unset, or reads through the nested
// builder when one is active.
public com.google.protobuf.FieldMask getUpdateMask() {
  if (updateMaskBuilder_ == null) {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  } else {
    return updateMaskBuilder_.getMessage();
  }
}
/**
* <pre>
* Optional. Field mask that specifies the fields in `sink` that need
* an update. A sink field will be overwritten if, and only if, it is
* in the update mask. `name` and output only fields cannot be updated.
* An empty updateMask is temporarily treated as using the following mask
* for backwards compatibility purposes:
* destination,filter,includeChildren
* At some point in the future, behavior will be removed and specifying an
* empty updateMask will be an error.
* For a detailed `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* Example: `updateMask=filter`.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
// Sets update_mask from a built message. Null is rejected (message fields are
// null-hostile); when a nested builder is active, the write is delegated to it
// so both representations stay in sync.
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    updateMask_ = value;
    onChanged();
  } else {
    updateMaskBuilder_.setMessage(value);
  }
  return this;
}
/**
* <pre>
* Optional. Field mask that specifies the fields in `sink` that need
* an update. A sink field will be overwritten if, and only if, it is
* in the update mask. `name` and output only fields cannot be updated.
* An empty updateMask is temporarily treated as using the following mask
* for backwards compatibility purposes:
* destination,filter,includeChildren
* At some point in the future, behavior will be removed and specifying an
* empty updateMask will be an error.
* For a detailed `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* Example: `updateMask=filter`.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
// Overload taking a FieldMask.Builder: the builder is built eagerly here, so
// later mutations of builderForValue do NOT affect this request.
public Builder setUpdateMask(
    com.google.protobuf.FieldMask.Builder builderForValue) {
  if (updateMaskBuilder_ == null) {
    updateMask_ = builderForValue.build();
    onChanged();
  } else {
    updateMaskBuilder_.setMessage(builderForValue.build());
  }
  return this;
}
/**
* <pre>
* Optional. Field mask that specifies the fields in `sink` that need
* an update. A sink field will be overwritten if, and only if, it is
* in the update mask. `name` and output only fields cannot be updated.
* An empty updateMask is temporarily treated as using the following mask
* for backwards compatibility purposes:
* destination,filter,includeChildren
* At some point in the future, behavior will be removed and specifying an
* empty updateMask will be an error.
* For a detailed `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* Example: `updateMask=filter`.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
// Merges value into the current update_mask. With no nested builder, an
// existing message is combined via newBuilder(...).mergeFrom(...) using
// standard protobuf message-merge semantics; if unset, value is adopted
// directly. Otherwise the merge is delegated to the nested builder.
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    if (updateMask_ != null) {
      updateMask_ =
        com.google.protobuf.FieldMask.newBuilder(updateMask_).mergeFrom(value).buildPartial();
    } else {
      updateMask_ = value;
    }
    onChanged();
  } else {
    updateMaskBuilder_.mergeFrom(value);
  }
  return this;
}
/**
* <pre>
* Optional. Field mask that specifies the fields in `sink` that need
* an update. A sink field will be overwritten if, and only if, it is
* in the update mask. `name` and output only fields cannot be updated.
* An empty updateMask is temporarily treated as using the following mask
* for backwards compatibility purposes:
* destination,filter,includeChildren
* At some point in the future, behavior will be removed and specifying an
* empty updateMask will be an error.
* For a detailed `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* Example: `updateMask=filter`.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
// Clears update_mask. Note the asymmetry: when a nested builder exists it is
// discarded without calling onChanged() — dropping the builder itself is the
// generated runtime's signal that the field was reset.
public Builder clearUpdateMask() {
  if (updateMaskBuilder_ == null) {
    updateMask_ = null;
    onChanged();
  } else {
    updateMask_ = null;
    updateMaskBuilder_ = null;
  }
  return this;
}
/**
* <pre>
* Optional. Field mask that specifies the fields in `sink` that need
* an update. A sink field will be overwritten if, and only if, it is
* in the update mask. `name` and output only fields cannot be updated.
* An empty updateMask is temporarily treated as using the following mask
* for backwards compatibility purposes:
* destination,filter,includeChildren
* At some point in the future, behavior will be removed and specifying an
* empty updateMask will be an error.
* For a detailed `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* Example: `updateMask=filter`.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
// Returns a mutable builder for update_mask, creating the nested builder on
// first use. onChanged() is called up front because handing out the builder
// means the field may be mutated at any time afterwards.
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
  onChanged();
  return getUpdateMaskFieldBuilder().getBuilder();
}
/**
* <pre>
* Optional. Field mask that specifies the fields in `sink` that need
* an update. A sink field will be overwritten if, and only if, it is
* in the update mask. `name` and output only fields cannot be updated.
* An empty updateMask is temporarily treated as using the following mask
* for backwards compatibility purposes:
* destination,filter,includeChildren
* At some point in the future, behavior will be removed and specifying an
* empty updateMask will be an error.
* For a detailed `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* Example: `updateMask=filter`.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
// Read-only view of update_mask that avoids forcing builder creation:
// prefers the live nested builder's view, else the raw message, else the
// default instance. Never returns null.
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  if (updateMaskBuilder_ != null) {
    return updateMaskBuilder_.getMessageOrBuilder();
  } else {
    return updateMask_ == null ?
        com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
}
/**
* <pre>
* Optional. Field mask that specifies the fields in `sink` that need
* an update. A sink field will be overwritten if, and only if, it is
* in the update mask. `name` and output only fields cannot be updated.
* An empty updateMask is temporarily treated as using the following mask
* for backwards compatibility purposes:
* destination,filter,includeChildren
* At some point in the future, behavior will be removed and specifying an
* empty updateMask will be an error.
* For a detailed `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* Example: `updateMask=filter`.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
// Lazily constructs the nested SingleFieldBuilderV3, seeding it with the
// current message value, then nulls updateMask_ — from this point on the
// builder is the single source of truth for the field.
private com.google.protobuf.SingleFieldBuilderV3<
    com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>
    getUpdateMaskFieldBuilder() {
  if (updateMaskBuilder_ == null) {
    updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>(
            getUpdateMask(),
            getParentForChildren(),
            isClean());
    updateMask_ = null;
  }
  return updateMaskBuilder_;
}
// Proto3 variant of the unknown-field setter, delegating to the generated
// superclass helper.
public final Builder setUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFieldsProto3(unknownFields);
}
// Merges unknown fields parsed from the wire into this builder (superclass
// implementation).
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.logging.v2.UpdateSinkRequest)
}
// @@protoc_insertion_point(class_scope:google.logging.v2.UpdateSinkRequest)
// Singleton default instance for UpdateSinkRequest, created eagerly at class
// load; serves as the canonical "all fields unset" message.
private static final com.google.logging.v2.UpdateSinkRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.logging.v2.UpdateSinkRequest();
}
// Static accessor for the shared default (unset) instance.
public static com.google.logging.v2.UpdateSinkRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser: each call constructs a new message directly from the
// CodedInputStream via the generated parsing constructor.
private static final com.google.protobuf.Parser<UpdateSinkRequest>
    PARSER = new com.google.protobuf.AbstractParser<UpdateSinkRequest>() {
  public UpdateSinkRequest parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new UpdateSinkRequest(input, extensionRegistry);
  }
};
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<UpdateSinkRequest> parser() {
  return PARSER;
}
// Instance-level parser accessor required by the Message interface; returns
// the same shared PARSER.
@java.lang.Override
public com.google.protobuf.Parser<UpdateSinkRequest> getParserForType() {
  return PARSER;
}
// Instance-level default accessor required by the Message interface.
public com.google.logging.v2.UpdateSinkRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
package org.apereo.cas.support.saml.web.idp.profile.builders.enc;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.configuration.model.support.saml.idp.SamlIdPResponseProperties;
import org.apereo.cas.support.saml.SamlException;
import org.apereo.cas.support.saml.SamlIdPUtils;
import org.apereo.cas.support.saml.SamlUtils;
import org.apereo.cas.support.saml.idp.metadata.locator.SamlIdPMetadataLocator;
import org.apereo.cas.support.saml.services.SamlRegisteredService;
import org.apereo.cas.support.saml.services.idp.metadata.SamlRegisteredServiceServiceProviderMetadataFacade;
import org.apereo.cas.util.crypto.CertUtils;
import org.apereo.cas.util.crypto.PrivateKeyFactoryBean;
import com.google.common.collect.Sets;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import net.shibboleth.utilities.java.support.resolver.CriteriaSet;
import org.apache.commons.lang3.StringUtils;
import org.opensaml.core.criterion.EntityIdCriterion;
import org.opensaml.messaging.context.MessageContext;
import org.opensaml.saml.common.SAMLException;
import org.opensaml.saml.common.SAMLObject;
import org.opensaml.saml.common.binding.impl.SAMLOutboundDestinationHandler;
import org.opensaml.saml.common.binding.security.impl.EndpointURLSchemeSecurityHandler;
import org.opensaml.saml.common.binding.security.impl.SAMLOutboundProtocolMessageSigningHandler;
import org.opensaml.saml.criterion.EntityRoleCriterion;
import org.opensaml.saml.criterion.RoleDescriptorCriterion;
import org.opensaml.saml.metadata.resolver.MetadataResolver;
import org.opensaml.saml.saml2.core.RequestAbstractType;
import org.opensaml.saml.saml2.metadata.IDPSSODescriptor;
import org.opensaml.saml.saml2.metadata.RoleDescriptor;
import org.opensaml.saml.security.impl.MetadataCredentialResolver;
import org.opensaml.saml.security.impl.SAMLMetadataSignatureSigningParametersResolver;
import org.opensaml.security.credential.AbstractCredential;
import org.opensaml.security.credential.BasicCredential;
import org.opensaml.security.credential.Credential;
import org.opensaml.security.credential.UsageType;
import org.opensaml.security.criteria.UsageCriterion;
import org.opensaml.security.x509.BasicX509Credential;
import org.opensaml.xmlsec.SignatureSigningConfiguration;
import org.opensaml.xmlsec.SignatureSigningParameters;
import org.opensaml.xmlsec.config.DefaultSecurityConfigurationBootstrap;
import org.opensaml.xmlsec.context.SecurityParametersContext;
import org.opensaml.xmlsec.criterion.SignatureSigningConfigurationCriterion;
import org.springframework.core.io.FileSystemResource;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.security.PrivateKey;
import java.util.ArrayList;
/**
* This is {@link SamlIdPObjectSigner}.
*
* @author Misagh Moayyed
* @since 5.0.0
*/
@Slf4j
@RequiredArgsConstructor
public class SamlIdPObjectSigner {
    // Resolver over this IdP's own metadata; used to locate signing credentials.
    private final MetadataResolver casSamlIdPMetadataResolver;
    // Root CAS configuration; SAML IdP signing settings live under authn.samlIdp.
    private final CasConfigurationProperties casProperties;
    // Locates the IdP's signing key/certificate artifacts on disk.
    private final SamlIdPMetadataLocator samlIdPMetadataLocator;
    /**
     * Encode a given saml object by invoking a number of outbound security handlers on the context.
     * Builds a fresh outbound {@link MessageContext}, attaches signing parameters, then runs the
     * endpoint-scheme, destination and message-signing handlers in that order before returning
     * the (now signed) object.
     *
     * @param <T>          the type parameter
     * @param samlObject   the saml object
     * @param service      the service
     * @param adaptor      the adaptor
     * @param response     the response
     * @param request      the request
     * @param binding      the binding
     * @param authnRequest the authn request
     * @return the same saml object instance, after the outbound handlers have run
     * @throws SamlException the saml exception
     */
    // NOTE: @SneakyThrows means any checked exception from the handlers is rethrown
    // unchecked, despite the declared SamlException.
    @SneakyThrows
    public <T extends SAMLObject> T encode(final T samlObject,
                                           final SamlRegisteredService service,
                                           final SamlRegisteredServiceServiceProviderMetadataFacade adaptor,
                                           final HttpServletResponse response,
                                           final HttpServletRequest request,
                                           final String binding,
                                           final RequestAbstractType authnRequest) throws SamlException {
        LOGGER.debug("Attempting to encode [{}] for [{}]", samlObject.getClass().getName(), adaptor.getEntityId());
        val outboundContext = new MessageContext<T>();
        prepareOutboundContext(samlObject, adaptor, outboundContext, binding, authnRequest);
        prepareSecurityParametersContext(adaptor, outboundContext, service);
        prepareEndpointURLSchemeSecurityHandler(outboundContext);
        prepareSamlOutboundDestinationHandler(outboundContext);
        prepareSamlOutboundProtocolMessageSigningHandler(outboundContext);
        return samlObject;
    }
    /**
     * Prepare saml outbound protocol message signing handler.
     * Signs the message held by the outbound context; whether SAML error
     * responses are signed is driven by the {@code signError} response setting.
     *
     * @param <T>             the type parameter
     * @param outboundContext the outbound context
     * @throws Exception the exception
     */
    // NOTE(review): unlike the destination/endpoint handlers below, this handler is
    // invoked without a prior initialize() call — confirm whether
    // SAMLOutboundProtocolMessageSigningHandler requires initialization.
    protected <T extends SAMLObject> void prepareSamlOutboundProtocolMessageSigningHandler(final MessageContext<T> outboundContext) throws Exception {
        LOGGER.debug("Attempting to sign the outbound SAML message...");
        val handler = new SAMLOutboundProtocolMessageSigningHandler();
        handler.setSignErrorResponses(casProperties.getAuthn().getSamlIdp().getResponse().isSignError());
        handler.invoke(outboundContext);
        LOGGER.debug("Signed SAML message successfully");
    }
    /**
     * Prepare saml outbound destination handler.
     * Stamps the Destination attribute on the outbound message from the
     * peer endpoint recorded in the context.
     *
     * @param <T>             the type parameter
     * @param outboundContext the outbound context
     * @throws Exception the exception
     */
    protected <T extends SAMLObject> void prepareSamlOutboundDestinationHandler(final MessageContext<T> outboundContext) throws Exception {
        val handlerDest = new SAMLOutboundDestinationHandler();
        handlerDest.initialize();
        handlerDest.invoke(outboundContext);
    }
    /**
     * Prepare endpoint url scheme security handler.
     * Validates the URL scheme of the resolved peer endpoint.
     *
     * @param <T>             the type parameter
     * @param outboundContext the outbound context
     * @throws Exception the exception
     */
    protected <T extends SAMLObject> void prepareEndpointURLSchemeSecurityHandler(final MessageContext<T> outboundContext) throws Exception {
        val handlerEnd = new EndpointURLSchemeSecurityHandler();
        handlerEnd.initialize();
        handlerEnd.invoke(outboundContext);
    }
    /**
     * Prepare security parameters context.
     * Resolves signature signing parameters for the SP's SSO descriptor and
     * installs them on a (created-on-demand) SecurityParametersContext subcontext.
     *
     * @param <T>             the type parameter
     * @param adaptor         the adaptor
     * @param outboundContext the outbound context
     * @param service         the service
     * @throws SAMLException the saml exception
     */
    protected <T extends SAMLObject> void prepareSecurityParametersContext(final SamlRegisteredServiceServiceProviderMetadataFacade adaptor,
                                                                           final MessageContext<T> outboundContext,
                                                                           final SamlRegisteredService service) throws SAMLException {
        @NonNull
        val secParametersContext = outboundContext.getSubcontext(SecurityParametersContext.class, true);
        val roleDesc = adaptor.getSsoDescriptor();
        val signingParameters = buildSignatureSigningParameters(roleDesc, service);
        secParametersContext.setSignatureSigningParameters(signingParameters);
    }
    /**
     * Prepare outbound context.
     * Attaches the message payload and resolves the peer entity's SAML
     * endpoint for the requested binding.
     *
     * @param <T>             the type parameter
     * @param samlObject      the saml object
     * @param adaptor         the adaptor
     * @param outboundContext the outbound context
     * @param binding         the binding
     * @param authnRequest    the authn request
     * @throws SamlException the saml exception
     */
    protected <T extends SAMLObject> void prepareOutboundContext(final T samlObject,
                                                                 final SamlRegisteredServiceServiceProviderMetadataFacade adaptor,
                                                                 final MessageContext<T> outboundContext,
                                                                 final String binding,
                                                                 final RequestAbstractType authnRequest) throws SamlException {
        LOGGER.debug("Outbound saml object to use is [{}]", samlObject.getClass().getName());
        outboundContext.setMessage(samlObject);
        SamlIdPUtils.preparePeerEntitySamlEndpointContext(authnRequest, outboundContext, adaptor, binding);
    }
    /**
     * Build signature signing parameters signature signing parameters.
     * Combines the signing configuration (algorithms, credentials) with the
     * SP's role descriptor and resolves a single set of parameters via the
     * OpenSAML metadata-aware resolver.
     *
     * @param descriptor the descriptor
     * @param service    the service
     * @return the signature signing parameters; @SneakyThrows rethrows resolver errors unchecked
     */
    @SneakyThrows
    protected SignatureSigningParameters buildSignatureSigningParameters(final RoleDescriptor descriptor,
                                                                         final SamlRegisteredService service) {
        val criteria = new CriteriaSet();
        val signatureSigningConfiguration = getSignatureSigningConfiguration(descriptor, service);
        criteria.add(new SignatureSigningConfigurationCriterion(signatureSigningConfiguration));
        criteria.add(new RoleDescriptorCriterion(descriptor));
        val resolver = new SAMLMetadataSignatureSigningParametersResolver();
        LOGGER.debug("Resolving signature signing parameters for [{}]", descriptor.getElementQName().getLocalPart());
        @NonNull
        val params = resolver.resolveSingle(criteria);
        LOGGER.debug("Created signature signing parameters."
            + "\nSignature algorithm: [{}]"
            + "\nSignature canonicalization algorithm: [{}]"
            + "\nSignature reference digest methods: [{}]",
            params.getSignatureAlgorithm(),
            params.getSignatureCanonicalizationAlgorithm(),
            params.getSignatureReferenceDigestMethod());
        return params;
    }
    /**
     * Gets signature signing configuration.
     * Starts from OpenSAML's default signing configuration, applies any
     * algorithm overrides from CAS properties, then resolves signing
     * credentials from the IdP metadata and installs them on the config.
     *
     * @param roleDescriptor the role descriptor
     * @param service        the service
     * @return the signature signing configuration
     * @throws Exception the exception
     */
    protected SignatureSigningConfiguration getSignatureSigningConfiguration(final RoleDescriptor roleDescriptor,
                                                                             final SamlRegisteredService service) throws Exception {
        val config = DefaultSecurityConfigurationBootstrap.buildDefaultSignatureSigningConfiguration();
        val algs = casProperties.getAuthn().getSamlIdp().getAlgs();
        val overrideSignatureReferenceDigestMethods = algs.getOverrideSignatureReferenceDigestMethods();
        val overrideSignatureAlgorithms = algs.getOverrideSignatureAlgorithms();
        val overrideBlackListedSignatureAlgorithms = algs.getOverrideBlackListedSignatureSigningAlgorithms();
        val overrideWhiteListedAlgorithms = algs.getOverrideWhiteListedSignatureSigningAlgorithms();
        // Each override replaces the corresponding default only when non-empty.
        if (overrideBlackListedSignatureAlgorithms != null && !overrideBlackListedSignatureAlgorithms.isEmpty()) {
            config.setBlacklistedAlgorithms(overrideBlackListedSignatureAlgorithms);
        }
        if (overrideSignatureAlgorithms != null && !overrideSignatureAlgorithms.isEmpty()) {
            config.setSignatureAlgorithms(overrideSignatureAlgorithms);
        }
        if (overrideSignatureReferenceDigestMethods != null && !overrideSignatureReferenceDigestMethods.isEmpty()) {
            config.setSignatureReferenceDigestMethods(overrideSignatureReferenceDigestMethods);
        }
        if (overrideWhiteListedAlgorithms != null && !overrideWhiteListedAlgorithms.isEmpty()) {
            config.setWhitelistedAlgorithms(overrideWhiteListedAlgorithms);
        }
        if (StringUtils.isNotBlank(algs.getOverrideSignatureCanonicalizationAlgorithm())) {
            config.setSignatureCanonicalizationAlgorithm(algs.getOverrideSignatureCanonicalizationAlgorithm());
        }
        LOGGER.debug("Signature signing blacklisted algorithms: [{}]", config.getBlacklistedAlgorithms());
        LOGGER.debug("Signature signing signature algorithms: [{}]", config.getSignatureAlgorithms());
        LOGGER.debug("Signature signing signature canonicalization algorithm: [{}]", config.getSignatureCanonicalizationAlgorithm());
        LOGGER.debug("Signature signing whitelisted algorithms: [{}]", config.getWhitelistedAlgorithms());
        LOGGER.debug("Signature signing reference digest methods: [{}]", config.getSignatureReferenceDigestMethods());
        val privateKey = getSigningPrivateKey();
        val idp = casProperties.getAuthn().getSamlIdp();
        // Resolve SIGNING-usage credentials for this IdP's entity id from metadata.
        val kekCredentialResolver = new MetadataCredentialResolver();
        val roleDescriptorResolver = SamlIdPUtils.getRoleDescriptorResolver(casSamlIdPMetadataResolver, idp.getMetadata().isRequireValidMetadata());
        kekCredentialResolver.setRoleDescriptorResolver(roleDescriptorResolver);
        kekCredentialResolver.setKeyInfoCredentialResolver(DefaultSecurityConfigurationBootstrap.buildBasicInlineKeyInfoCredentialResolver());
        kekCredentialResolver.initialize();
        val criteriaSet = new CriteriaSet();
        criteriaSet.add(new SignatureSigningConfigurationCriterion(config));
        criteriaSet.add(new UsageCriterion(UsageType.SIGNING));
        criteriaSet.add(new EntityIdCriterion(casProperties.getAuthn().getSamlIdp().getEntityId()));
        criteriaSet.add(new EntityRoleCriterion(IDPSSODescriptor.DEFAULT_ELEMENT_NAME));
        val credentials = Sets.<Credential>newLinkedHashSet(kekCredentialResolver.resolve(criteriaSet));
        val creds = new ArrayList<Credential>();
        // Pair each resolved public credential with the IdP private key;
        // credentials that fail to resolve are logged and skipped (null filter).
        credentials.forEach(c -> {
            val cred = getResolvedSigningCredential(c, privateKey, service);
            if (cred != null) {
                creds.add(cred);
            }
        });
        config.setSigningCredentials(creds);
        LOGGER.debug("Signature signing credentials configured with [{}] credentials", creds.size());
        return config;
    }
    // Builds the concrete signing credential for one resolved credential,
    // honoring the per-service credential-type override (falling back to the
    // global response credential type). Returns null on any failure, which the
    // caller treats as "skip this credential".
    private AbstractCredential getResolvedSigningCredential(final Credential c, final PrivateKey privateKey,
                                                            final SamlRegisteredService service) {
        val samlIdp = casProperties.getAuthn().getSamlIdp();
        try {
            val credType = SamlIdPResponseProperties.SignatureCredentialTypes.valueOf(
                StringUtils.defaultIfBlank(service.getSigningCredentialType(), samlIdp.getResponse().getCredentialType().name()).toUpperCase());
            LOGGER.debug("Requested credential type [{}] is found for service [{}]", credType, service);
            switch (credType) {
                case BASIC:
                    LOGGER.debug("Building basic credential signing key [{}] based on requested credential type", credType);
                    return new BasicCredential(c.getPublicKey(), privateKey);
                case X509:
                default:
                    // Prefer the certificate already attached to the resolved credential;
                    // otherwise fall back to the signing certificate file on disk.
                    if (c instanceof BasicX509Credential) {
                        val certificate = BasicX509Credential.class.cast(c).getEntityCertificate();
                        LOGGER.debug("Locating signature signing certificate from credential [{}]", CertUtils.toString(certificate));
                        return new BasicX509Credential(certificate, privateKey);
                    }
                    val signingCert = samlIdPMetadataLocator.getSigningCertificate();
                    LOGGER.debug("Locating signature signing certificate file from [{}]", signingCert);
                    val certificate = SamlUtils.readCertificate(signingCert);
                    return new BasicX509Credential(certificate, privateKey);
            }
        } catch (final Exception e) {
            LOGGER.error(e.getMessage(), e);
        }
        return null;
    }
    /**
     * Gets signing private key.
     * Loads the key from the file located by the metadata locator, using the
     * configured private-key algorithm. Singleton is disabled so each call
     * re-reads the file (picks up rotated keys).
     *
     * @return the signing private key
     * @throws Exception the exception
     */
    protected PrivateKey getSigningPrivateKey() throws Exception {
        val samlIdp = casProperties.getAuthn().getSamlIdp();
        val signingKey = samlIdPMetadataLocator.getSigningKey();
        val privateKeyFactoryBean = new PrivateKeyFactoryBean();
        privateKeyFactoryBean.setLocation(new FileSystemResource(signingKey.getFile()));
        privateKeyFactoryBean.setAlgorithm(samlIdp.getMetadata().getPrivateKeyAlgName());
        privateKeyFactoryBean.setSingleton(false);
        LOGGER.debug("Locating signature signing key file from [{}]", signingKey);
        return privateKeyFactoryBean.getObject();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.IOException;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URL;
import java.net.InetSocketAddress;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Stack;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.UUID;
import java.util.Calendar;
import java.util.GregorianCalendar;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FSError;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;
import org.apache.hadoop.ipc.ProtocolSignature;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.mapred.JobTrackerMetricsInst;
import org.apache.hadoop.mapred.JvmTask;
import org.apache.hadoop.mapred.JobClient.RawSplit;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
/** Implements MapReduce locally, in-process, for debugging. */
public class LocalJobRunner implements JobSubmissionProtocol {
public static final Log LOG =
    LogFactory.getLog(LocalJobRunner.class);
private FileSystem fs;
// All jobs submitted to this in-process runner, keyed by job id.
private HashMap<JobID, Job> jobs = new HashMap<JobID, Job>();
private JobConf conf;
// Live task counters; volatile because they are updated from task threads.
private volatile int map_tasks = 0;
private volatile int reduce_tasks = 0;
private JobTrackerInstrumentation myMetrics = null;
// Per-runner log directory name, computed once (see computeLogDir).
private String runnerLogDir;
// Subdirectory under the local dirs where job files are persisted.
private static final String jobDir = "localRunner/";
// Child task JVMs connect back to the umbilical server on loopback.
public static final String LOCALHOST = "127.0.0.1";
// Config key / default for the number of concurrent local task slots.
public static final String LOCAL_RUNNER_SLOTS = "local.job.tracker.slots";
public static final int DEFAULT_LOCAL_RUNNER_SLOTS = 4;
// RPC version handshake: always reports the JobSubmissionProtocol's static
// versionID regardless of the protocol name requested.
public long getProtocolVersion(String protocol, long clientVersion) {
  return JobSubmissionProtocol.versionID;
}
// Delegates to the standard helper which reflects over this class's methods
// to build the protocol signature for the client's compatibility check.
public ProtocolSignature getProtocolSignature(String protocol,
    long clientVersion, int clientMethodsHash) throws IOException {
  return ProtocolSignature.getProtocolSignature(
      this, protocol, clientVersion, clientMethodsHash);
}
/**
 * Builds a unique per-run log directory name of the form
 * {@code local_<yyyyMMddHHmmss>_<uuid>} from the current local time.
 *
 * @return the log directory name (relative, no path separators)
 */
private String computeLogDir() {
  GregorianCalendar gc = new GregorianCalendar();
  // Calendar.MONTH is zero-based, hence the +1.
  // Bug fix: the last conversion previously reused argument index %5$
  // (minutes), so minutes were printed twice and the SECOND argument was
  // silently ignored; %6$ correctly emits the seconds.
  return String.format("local_%1$4d%2$02d%3$02d%4$02d%5$02d%6$02d",
      gc.get(Calendar.YEAR), gc.get(Calendar.MONTH) + 1, gc
      .get(Calendar.DAY_OF_MONTH), gc.get(Calendar.HOUR_OF_DAY), gc
      .get(Calendar.MINUTE), gc.get(Calendar.SECOND))
      + "_"
      + UUID.randomUUID().toString();
}
private class Job extends Thread
implements TaskUmbilicalProtocol {
private JobID id;
private JobConf job;
private JobStatus status;
// Count of map tasks whose child JVM exited 0; volatile as it is bumped
// from per-task runner threads.
private volatile int numSucceededMaps = 0;
private ArrayList<TaskAttemptID> mapIds = new ArrayList<TaskAttemptID>();
private MapOutputFile mapoutputFile;
private JobProfile profile;
// Local copy of the job conf, persisted as <jobDir><id>.xml (see constructor).
private Path localFile;
private FileSystem localFs;
boolean killed = false;
// Signals child-process wait loops to stop retrying (see TaskRunnable.run).
volatile boolean shutdown = false;
// When true, run tasks one at a time instead of in parallel slots.
boolean doSequential = true;
// Current counters, including incomplete task(s)
private Map<TaskAttemptID, Counters> currentCounters = new HashMap<TaskAttemptID, Counters>();
// Umbilical RPC handshake for child task JVMs: always reports the
// TaskUmbilicalProtocol's static versionID.
public long getProtocolVersion(String protocol, long clientVersion) {
  return TaskUmbilicalProtocol.versionID;
}
// Standard reflective protocol-signature computation for the umbilical
// protocol served to child JVMs.
public ProtocolSignature getProtocolSignature(String protocol,
    long clientVersion, int clientMethodsHash) throws IOException {
  return ProtocolSignature.getProtocolSignature(
      this, protocol, clientVersion, clientMethodsHash);
}
// The semaphore is initialized with the same number as the number of
// threads in the thread pool. Each TaskRunnable releases one permit in its
// finally block when its child process finishes.
Semaphore slots;
int numSlots;
// Identifier for task. Monotonically increasing per-job counter.
int taskCounter = 0;
// A thread pool with as many threads as the number of slots.
ExecutorService executor;
// Bookkeeping for child JVMs and their in-flight tasks, keyed by task counter.
private Map<Integer, JVMId> taskJvms = new HashMap<Integer, JVMId>();
private Map<Integer, Task> runningTasks = new HashMap<Integer, Task>();
// In-process RPC server the child JVMs call back into (umbilical protocol).
Server umbilicalServer;
int umbilicalPort;
// Launches one task in a separate child JVM (mirroring the real TaskTracker's
// behavior), waits for it to exit, and reports success/failure back to the Job.
class TaskRunnable implements Runnable {
  private Task task;
  // Task counter id used to address this task in the Job's bookkeeping maps.
  int id;
  TaskRunnable(Task task, int id) {
    this.task = task;
    this.id = id;
  }
  @Override
  public void run() {
    try {
      // Assemble the child JVM command line: same JVM binary as the parent.
      Vector<String> args = new Vector<String>();
      // Use same jvm as parent.
      File jvm =
          new File(new File(System.getProperty("java.home"), "bin"), "java");
      args.add(jvm.toString());
      // Add classpath: parent's classpath plus everything reachable from the
      // context classloader chain (see currentClassPath).
      String classPath = System.getProperty("java.class.path", "");
      classPath += System.getProperty("path.separator") + currentClassPath();
      args.add("-classpath");
      args.add(classPath);
      long logSize = TaskLog.getTaskLogLength(conf);
      // Create a log4j directory for the job.
      String logDir = new File(
          System.getProperty("hadoop.log.dir")).getAbsolutePath() +
          Path.SEPARATOR + runnerLogDir +
          Path.SEPARATOR + Job.this.id;
      LOG.info("Logs for " + task.getTaskID() + " are at " + logDir);
      args.add("-Dhadoop.log.dir=" + logDir);
      args.add("-Dhadoop.root.logger=INFO,TLA");
      args.add("-Dhadoop.tasklog.taskid=" + task.getTaskID().toString());
      args.add("-Dhadoop.tasklog.totalLogFileSize=" + logSize);
      // For test code.
      if (System.getProperty("test.build.data") != null) {
        args.add("-Dtest.build.data=" +
            System.getProperty("test.build.data"));
      }
      // Set java options, substituting the task id into @taskid@ placeholders.
      String javaOpts = conf.get(JobConf.MAPRED_TASK_JAVA_OPTS,
          JobConf.DEFAULT_MAPRED_TASK_JAVA_OPTS);
      javaOpts = javaOpts.replace("@taskid@", task.getTaskID().toString());
      String [] javaOptsSplit = javaOpts.split(" ");
      // Handle java.library.path: append the parent's library path to a
      // user-supplied -Djava.library.path, or pass ours through if absent.
      // Do we need current working directory also here?
      String libraryPath = System.getProperty("java.library.path");
      boolean hasUserLDPath = false;
      for(int i=0; i<javaOptsSplit.length ;i++) {
        if(javaOptsSplit[i].startsWith("-Djava.library.path=")) {
          javaOptsSplit[i] +=
              System.getProperty("path.separator") + libraryPath;
          hasUserLDPath = true;
          break;
        }
      }
      if(!hasUserLDPath && libraryPath != null) {
        args.add("-Djava.library.path=" + libraryPath);
      }
      for (int i = 0; i < javaOptsSplit.length; i++) {
        args.add(javaOptsSplit[i]);
      }
      // Add main class and its arguments: host/port of the umbilical server,
      // the task attempt id, and this runnable's counter id.
      args.add(LocalChild.class.getName()); // main of Child
      args.add(LOCALHOST);
      args.add(Integer.toString(Job.this.umbilicalPort));
      args.add(task.getTaskID().toString());
      args.add(Integer.toString(id));
      ProcessBuilder pb = new ProcessBuilder(args);
      Process proc = pb.start();
      // Wait for the child to exit; the empty InterruptedException catch makes
      // this retry waitFor() until the child exits or the job shuts down.
      while (!Job.this.shutdown) {
        try {
          int status = proc.waitFor();
          if (status != 0) {
            LOG.error("Child for " + task.getTaskID() + " exited with " +
                status);
            Job.this.statusUpdate(task.getTaskID(), failedStatus(task));
          } else {
            Job.this.numSucceededMaps++;
          }
          break;
        } catch (InterruptedException ie) {
        }
      }
    } catch (IOException e) {
      // Launch failure: mark the task failed; secondary exceptions from the
      // status update itself are deliberately dropped (best-effort cleanup).
      LOG.error("Launching task " + id + " error " + e);
      try {
        Job.this.statusUpdate(task.getTaskID(), failedStatus(task));
      } catch (IOException ie) {
      } catch (InterruptedException inte) {
      }
    } finally {
      // Always free the slot so subsequent tasks can be scheduled.
      // NOTE(review): completeMap is reported even when the child failed —
      // confirm whether the metrics hook expects only successful completions.
      Job.this.slots.release();
      if (task.isMapTask()) {
        LocalJobRunner.this.map_tasks -= 1;
        LocalJobRunner.this.myMetrics.completeMap(task.getTaskID());
      }
    }
  }
  // Collects URLs from the context classloader chain into a classpath string
  // (top-of-stack first, so ancestor loaders end up earliest in the path).
  // NOTE(review): the cast to URLClassLoader assumes every loader in the chain
  // is a URLClassLoader — true for classic launchers; verify on newer JVMs.
  private String currentClassPath() {
    Stack<String> paths = new Stack<String>();
    ClassLoader ccl = Thread.currentThread().getContextClassLoader();
    while (ccl != null) {
      for (URL u: ((URLClassLoader)ccl).getURLs()) {
        paths.push(u.getPath());
      }
      ccl = (URLClassLoader)ccl.getParent();
    }
    if (!paths.empty()) {
      String sep = System.getProperty("path.separator");
      StringBuffer appClassPath = new StringBuffer();
      while (!paths.empty()) {
        if (appClassPath.length() != 0) {
          appClassPath.append(sep);
        }
        appClassPath.append(paths.pop());
      }
      return appClassPath.toString();
    } else {
      return "";
    }
  }
}
/**
 * Creates the local job: persists the job configuration to the local file
 * system, registers the job, sets up the task-slot semaphore, executor and
 * umbilical RPC server, and finally starts the job thread.
 *
 * @param jobid id assigned to this job
 * @param conf the submitted job configuration
 * @throws IOException if the configuration cannot be persisted or the
 *         umbilical RPC server cannot be created
 */
public Job(JobID jobid, JobConf conf) throws IOException {
// When true, maps run in-process one at a time; otherwise in child JVMs.
this.doSequential =
conf.getBoolean("mapred.localrunner.sequential", true);
this.id = jobid;
this.mapoutputFile = new MapOutputFile(jobid);
this.mapoutputFile.setConf(conf);
this.localFile = new JobConf(conf).getLocalPath(jobDir+id+".xml");
this.localFs = FileSystem.getLocal(conf);
persistConf(this.localFs, this.localFile, conf);
// Re-read the persisted copy so the job sees exactly what was written.
this.job = new JobConf(localFile);
profile = new JobProfile(job.getUser(), id, localFile.toString(),
"http://localhost:8080/", job.getJobName());
status = new JobStatus(id, 0.0f, 0.0f, JobStatus.RUNNING);
jobs.put(id, this);
numSlots = conf.getInt(LOCAL_RUNNER_SLOTS, DEFAULT_LOCAL_RUNNER_SLOTS);
slots = new Semaphore(numSlots);
executor = Executors.newFixedThreadPool(numSlots);
int handlerCount = conf.getInt("mapred.job.tracker.handler.count",
numSlots);
// RPC server the child JVMs connect back to (umbilical protocol);
// port 0 lets the OS pick a free port, recorded below.
umbilicalServer =
RPC.getServer(this, LOCALHOST, 0, handlerCount, false, conf);
umbilicalServer.start();
umbilicalPort = umbilicalServer.getListenerAddress().getPort();
// NOTE(review): starting the thread from the constructor leaks a
// partially-constructed 'this'; kept as-is for compatibility.
this.start();
}
/** Returns the static profile (user, job file, name) of this job. */
JobProfile getProfile() {
return profile;
}
/**
 * Serializes the given configuration as XML to {@code file}, replacing any
 * previous copy.
 *
 * @param fs the (local) file system to write to
 * @param file destination path for the serialized configuration
 * @param conf the configuration to persist
 * @throws IOException if the file cannot be created or written
 */
private void persistConf(FileSystem fs, Path file, JobConf conf)
    throws IOException {
  // Best-effort removal of a stale copy before re-creating the file.
  new File(file.toUri().getPath()).delete();
  FSDataOutputStream out = FileSystem.create(
      fs, file, FsPermission.getDefault());
  try {
    conf.writeXml(out);
  } finally {
    // Always close so the stream is not leaked if writeXml throws.
    out.close();
  }
}
@Override
public void run() {
// Drives the whole job lifecycle: split computation, map execution
// (sequential in-process or parallel in child JVMs), the optional single
// reduce, commit/abort, and final teardown of the umbilical server.
JobID jobId = profile.getJobID();
JobContext jContext = new JobContext(conf, jobId);
OutputCommitter outputCommitter = job.getOutputCommitter();
try {
// split input into minimum number of splits
RawSplit[] rawSplits;
if (job.getUseNewMapper()) {
// New (mapreduce) API: serialize each split with its configured
// serializer into a reusable buffer.
org.apache.hadoop.mapreduce.InputFormat<?,?> input =
ReflectionUtils.newInstance(jContext.getInputFormatClass(), jContext.getJobConf());
List<org.apache.hadoop.mapreduce.InputSplit> splits = input.getSplits(jContext);
rawSplits = new RawSplit[splits.size()];
DataOutputBuffer buffer = new DataOutputBuffer();
SerializationFactory factory = new SerializationFactory(conf);
Serializer serializer =
factory.getSerializer(splits.get(0).getClass());
serializer.open(buffer);
for (int i = 0; i < splits.size(); i++) {
buffer.reset();
serializer.serialize(splits.get(i));
RawSplit rawSplit = new RawSplit();
rawSplit.setClassName(splits.get(i).getClass().getName());
rawSplit.setDataLength(splits.get(i).getLength());
rawSplit.setBytes(buffer.getData(), 0, buffer.getLength());
rawSplit.setLocations(splits.get(i).getLocations());
rawSplits[i] = rawSplit;
}
} else {
// Old (mapred) API: splits are Writable and serialize themselves.
InputSplit[] splits = job.getInputFormat().getSplits(job, 1);
rawSplits = new RawSplit[splits.length];
DataOutputBuffer buffer = new DataOutputBuffer();
for (int i = 0; i < splits.length; i++) {
buffer.reset();
splits[i].write(buffer);
RawSplit rawSplit = new RawSplit();
rawSplit.setClassName(splits[i].getClass().getName());
rawSplit.setDataLength(splits[i].getLength());
rawSplit.setBytes(buffer.getData(), 0, buffer.getLength());
rawSplit.setLocations(splits[i].getLocations());
rawSplits[i] = rawSplit;
}
}
int numReduceTasks = job.getNumReduceTasks();
if (numReduceTasks > 1 || numReduceTasks < 0) {
// we only allow 0 or 1 reducer in local mode
numReduceTasks = 1;
job.setNumReduceTasks(1);
}
outputCommitter.setupJob(jContext);
status.setSetupProgress(1.0f);
// Launch one map task per split, each with its own localized config
// persisted to disk so a child JVM can re-read it.
for (int i = 0; i < rawSplits.length; i++) {
if (!this.isInterrupted()) {
TaskAttemptID mapId = new TaskAttemptID(new TaskID(jobId, true, i),0);
mapIds.add(mapId);
Path taskJobFile = job.getLocalPath(jobDir + id + "_" + mapId + ".xml");
MapTask map = new MapTask(taskJobFile.toString(),
mapId, i,
rawSplits[i].getClassName(),
rawSplits[i].getBytes(), 1,
job.getUser());
JobConf localConf = new JobConf(job);
map.localizeConfiguration(localConf);
map.setConf(localConf);
persistConf(this.localFs, taskJobFile, localConf);
map.setJobFile(taskJobFile.toUri().getPath());
map_tasks += 1;
myMetrics.launchMap(mapId);
// Special handling for the single mapper case.
if (this.doSequential) {
map.run(localConf, this);
numSucceededMaps++;
myMetrics.completeMap(mapId);
map_tasks -= 1;
} else {
runTask(map);
}
} else {
throw new InterruptedException();
}
}
// Wait for all maps to be done.
// Acquiring every slot only succeeds once each TaskRunnable has
// released its slot in its finally block.
slots.acquire(numSlots);
if (numSucceededMaps < rawSplits.length) {
throw new IOException((rawSplits.length - numSucceededMaps) +
" maps failed");
}
TaskAttemptID reduceId =
new TaskAttemptID(new TaskID(jobId, false, 0), 0);
try {
if (numReduceTasks > 0) {
// move map output to reduce input
for (int i = 0; i < mapIds.size(); i++) {
if (!this.isInterrupted()) {
TaskAttemptID mapId = mapIds.get(i);
Path mapOut = this.mapoutputFile.getOutputFile(mapId);
Path reduceIn = this.mapoutputFile.getInputFileForWrite(
mapId.getTaskID(),reduceId,
localFs.getLength(mapOut));
if (!localFs.mkdirs(reduceIn.getParent())) {
throw new IOException("Mkdirs failed to create "
+ reduceIn.getParent().toString());
}
if (!localFs.rename(mapOut, reduceIn))
throw new IOException("Couldn't rename " + mapOut);
} else {
throw new InterruptedException();
}
}
if (!this.isInterrupted()) {
// The single reduce always runs in-process.
ReduceTask reduce = new ReduceTask(localFile.toString(),
reduceId, 0, mapIds.size(),
1, job.getUser());
JobConf localConf = new JobConf(job);
reduce.localizeConfiguration(localConf);
reduce.setConf(localConf);
persistConf(this.localFs, this.localFile, localConf);
reduce.setJobFile(localFile.toUri().getPath());
reduce_tasks += 1;
myMetrics.launchReduce(reduce.getTaskID());
reduce.run(localConf, this);
myMetrics.completeReduce(reduce.getTaskID());
reduce_tasks -= 1;
updateCounters(reduce.getTaskID(), reduce.getCounters());
} else {
throw new InterruptedException();
}
}
} finally {
// Remove intermediate map (and reduce-input) files regardless of
// success or failure of the reduce phase.
for (TaskAttemptID mapId: mapIds) {
this.mapoutputFile.removeAll(mapId);
}
if (numReduceTasks == 1) {
this.mapoutputFile.removeAll(reduceId);
}
}
// delete the temporary directory in output directory
outputCommitter.commitJob(jContext);
status.setCleanupProgress(1.0f);
if (killed) {
this.status.setRunState(JobStatus.KILLED);
} else {
this.status.setRunState(JobStatus.SUCCEEDED);
}
JobEndNotifier.localRunnerNotification(job, status);
} catch (Throwable t) {
// Any failure (including interruption via kill) aborts the job output
// and marks the job KILLED or FAILED accordingly.
try {
outputCommitter.abortJob(jContext, JobStatus.FAILED);
} catch (IOException ioe) {
LOG.info("Error cleaning up job:" + id);
}
status.setCleanupProgress(1.0f);
if (killed) {
this.status.setRunState(JobStatus.KILLED);
} else {
this.status.setRunState(JobStatus.FAILED);
}
LOG.warn(id, t);
JobEndNotifier.localRunnerNotification(job, status);
} finally {
// Always tear down: stop child-wait loops, the executor, the umbilical
// server, and delete the persisted job configuration.
this.shutdown = true;
executor.shutdownNow();
umbilicalServer.stop();
try {
localFs.delete(localFile, true); // delete local copy
} catch (IOException e) {
LOG.warn("Error cleaning up "+id+": "+e);
}
}
}
/**
 * Runs the given task asynchronously in a child JVM, blocking first until
 * a task slot is available. The slot is released by the TaskRunnable when
 * the child process exits.
 *
 * @param task the map task to launch
 */
void runTask(Task task) {
  try {
    slots.acquire();
  } catch (InterruptedException e) {
    // Restore the interrupt status instead of silently swallowing it, so
    // the job thread (see run()) can still observe that it was killed.
    Thread.currentThread().interrupt();
  }
  JobID jobId = task.getJobID();
  boolean isMap = task.isMapTask();
  JVMId jvmId = new JVMId(jobId, isMap, taskCounter++);
  // Register the task before the child JVM starts so getTask() finds it.
  synchronized (this) {
    taskJvms.put(jvmId.getId(), jvmId);
    runningTasks.put(jvmId.getId(), task);
  }
  TaskRunnable taskRunnable = new TaskRunnable(task, jvmId.getId());
  executor.execute(taskRunnable);
}
// TaskUmbilicalProtocol methods
/**
 * Umbilical entry point: hands the calling child JVM its registered task,
 * or a "should die" response when no task is known for that JVM id.
 */
public JvmTask getTask(JvmContext context) {
  int jvmIdKey = context.jvmId.getId();
  synchronized (this) {
    Task registered = runningTasks.get(jvmIdKey);
    return (registered == null)
        ? new JvmTask(null, true)
        : new JvmTask(registered, false);
  }
}
/**
 * Receives a progress report from a running task and folds it into the
 * overall job status; map tasks each contribute an equal share of the
 * total map progress. The reported phase is deliberately ignored.
 */
public boolean statusUpdate(TaskAttemptID taskId, TaskStatus taskStatus)
    throws IOException, InterruptedException {
  LOG.info(taskStatus.getStateString());
  int mapIndex = mapIds.indexOf(taskId);
  if (mapIndex >= 0) {
    // This is a map task: completed share plus this task's own progress.
    float numTasks = mapIds.size();
    status.setMapProgress(mapIndex / numTasks + taskStatus.getProgress() / numTasks);
  } else {
    status.setReduceProgress(taskStatus.getProgress());
  }
  Counters taskCounters = taskStatus.getCounters();
  if (taskCounters != null) {
    updateCounters(taskId, taskCounters);
  }
  return true;
}
/**
 * Task is reporting that it is in commit_pending
 * and it is waiting for the commit Response.
 * In local mode this is treated as a plain status update; canCommit()
 * always answers true, so no separate commit bookkeeping is needed.
 */
public void commitPending(TaskAttemptID taskid,
TaskStatus taskStatus)
throws IOException, InterruptedException {
statusUpdate(taskid, taskStatus);
}
/**
 * Updates counters corresponding to tasks. The latest snapshot per task
 * attempt is kept; totals are computed on demand in getJobCounters().
 */
private void updateCounters(TaskAttemptID taskId, Counters ctrs) {
synchronized(currentCounters) {
currentCounters.put(taskId, ctrs);
}
}
/** Logs a diagnostic string reported by a task; not persisted in local mode. */
public void reportDiagnosticInfo(TaskAttemptID taskid, String trace) {
LOG.error("Task diagnostic info for " + taskid + " : " + trace);
}
/** Logs the record range a task is about to process; no skip-tracking locally. */
public void reportNextRecordRange(TaskAttemptID taskid,
SortedRanges.Range range) throws IOException {
LOG.info("Task " + taskid + " reportedNextRecordRange " + range);
}
/** Liveness check from a child task; always alive in local mode. */
public boolean ping(TaskAttemptID taskid) throws IOException {
return true;
}
/** Commit arbitration: with no speculative execution locally, always allow. */
public boolean canCommit(TaskAttemptID taskid)
throws IOException {
return true;
}
/**
 * Marks the reporting task's phase (map or reduce) as fully complete in
 * the job status.
 */
public void done(TaskAttemptID taskId) throws IOException {
  boolean isMapAttempt = mapIds.contains(taskId);
  if (isMapAttempt) {
    status.setMapProgress(1.0f);
  } else {
    status.setReduceProgress(1.0f);
  }
}
/**
 * Logs a file-system error reported by a task. Local mode only records it;
 * the child process exit status drives actual failure handling.
 */
public synchronized void fsError(TaskAttemptID taskId, String message)
    throws IOException {
  // Fixed missing separator so the message and task id don't run together.
  LOG.fatal("FSError: " + message + " from task: " + taskId);
}
/** Logs a shuffle error reported by a task; no further action locally. */
public void shuffleError(TaskAttemptID taskId, String message) throws IOException {
  // Fixed missing separator so the message and task id don't run together.
  LOG.fatal("shuffleError: " + message + " from task: " + taskId);
}
/** Logs a fatal error reported by a task; no further action locally. */
public synchronized void fatalError(TaskAttemptID taskId, String msg)
    throws IOException {
  // Fixed missing separator so the message and task id don't run together.
  LOG.fatal("Fatal: " + msg + " from task: " + taskId);
}
/** Map-completion events are not tracked locally; always returns an empty update. */
public MapTaskCompletionEventsUpdate getMapCompletionEvents(JobID jobId,
int fromEventId, int maxLocs, TaskAttemptID id) throws IOException {
return new MapTaskCompletionEventsUpdate(TaskCompletionEvent.EMPTY_ARRAY,
false);
}
}
/**
 * Creates a local job runner bound to the local file system.
 *
 * @param conf configuration shared by all jobs submitted to this runner
 * @throws IOException if the local file system cannot be obtained
 */
public LocalJobRunner(JobConf conf) throws IOException {
this.fs = FileSystem.getLocal(conf);
this.conf = conf;
// Per-runner subdirectory under hadoop.log.dir for child task logs.
runnerLogDir = computeLogDir();
myMetrics = new JobTrackerMetricsInst(null, new JobConf(conf));
}
// JobSubmissionProtocol methods
private static int jobid = 0;
/** Allocates the next sequential job id in the "local" namespace. */
public synchronized JobID getNewJobId() {
  jobid += 1;
  return new JobID("local", jobid);
}
/** Creates and starts a local Job for the given id; returns its initial status. */
public JobStatus submitJob(JobID jobid) throws IOException {
return new Job(jobid, this.conf).status;
}
/**
 * Kills the given job by flagging it killed and interrupting its thread;
 * the job's run() loop converts the interruption into KILLED state.
 */
public void killJob(JobID id) {
  Job target = jobs.get(id);
  target.killed = true;
  target.interrupt();
}
/** Job priorities are meaningless with a single local runner; always throws. */
public void setJobPriority(JobID id, String jp) throws IOException {
throw new UnsupportedOperationException("Changing job priority " +
"in LocalJobRunner is not supported.");
}
/**
 * Throws {@link UnsupportedOperationException}: individual tasks cannot be
 * killed in local mode (use {@link #killJob} to stop the whole job).
 */
public boolean killTask(TaskAttemptID taskId, boolean shouldFail) throws IOException {
throw new UnsupportedOperationException("Killing tasks in " +
"LocalJobRunner is not supported");
}
/** Returns the profile of the given job, or null when the job is unknown. */
public JobProfile getJobProfile(JobID id) {
  Job job = jobs.get(id);
  return (job == null) ? null : job.getProfile();
}
/** Per-task reports are not tracked locally; always an empty array. */
public TaskReport[] getMapTaskReports(JobID id) {
return new TaskReport[0];
}
/** Per-task reports are not tracked locally; always an empty array. */
public TaskReport[] getReduceTaskReports(JobID id) {
return new TaskReport[0];
}
/** Per-task reports are not tracked locally; always an empty array. */
public TaskReport[] getCleanupTaskReports(JobID id) {
return new TaskReport[0];
}
/** Per-task reports are not tracked locally; always an empty array. */
public TaskReport[] getSetupTaskReports(JobID id) {
return new TaskReport[0];
}
/** Returns the live status of the given job, or null when the job is unknown. */
public JobStatus getJobStatus(JobID id) {
  Job job = jobs.get(id);
  return (job == null) ? null : job.status;
}
/**
 * Sums the most recent counter snapshot of every task attempt of the
 * given job into a single Counters object.
 */
public Counters getJobCounters(JobID id) {
  Job job = jobs.get(id);
  Counters total = new Counters();
  synchronized (job.currentCounters) {
    for (Counters perTask : job.currentCounters.values()) {
      total = Counters.sum(total, perTask);
    }
  }
  return total;
}
/** Returns the URI of the local file system backing this runner. */
public String getFilesystemName() throws IOException {
return fs.getUri().toString();
}
/** Synthesizes a one-node cluster status from the current local task counts. */
public ClusterStatus getClusterStatus(boolean detailed) {
return new ClusterStatus(1, 0, 0, map_tasks, reduce_tasks, 1, 1,
JobTracker.State.RUNNING);
}
/** Not tracked in local mode; always returns null. */
public JobStatus[] jobsToComplete() {return null;}
/** Completion events are not tracked locally; always an empty array. */
public TaskCompletionEvent[] getTaskCompletionEvents(JobID jobid
, int fromEventId, int maxEvents) throws IOException {
return TaskCompletionEvent.EMPTY_ARRAY;
}
/** Not tracked in local mode; always returns null. */
public JobStatus[] getAllJobs() {return null;}
/**
 * Returns the diagnostic information for a particular task in the given job.
 * To be implemented: local mode currently discards diagnostics (see
 * reportDiagnosticInfo, which only logs them), so this is always empty.
 */
public String[] getTaskDiagnostics(TaskAttemptID taskid)
throws IOException{
return new String [0];
}
/**
 * Returns the fully-qualified system directory used for job bookkeeping.
 *
 * @see org.apache.hadoop.mapred.JobSubmissionProtocol#getSystemDir()
 */
public String getSystemDir() {
  String configured = conf.get("mapred.system.dir", "/tmp/hadoop/mapred/system");
  return fs.makeQualified(new Path(configured)).toString();
}
@Override
/* Queues are not supported in local mode; always returns null. */
public JobStatus[] getJobsFromQueue(String queue) throws IOException {
return null;
}
@Override
/* Queues are not supported in local mode; always returns null. */
public JobQueueInfo[] getQueues() throws IOException {
return null;
}
@Override
/* Queues are not supported in local mode; always returns null. */
public JobQueueInfo getQueueInfo(String queue) throws IOException {
return null;
}
@Override
/* Queue ACLs are not supported in local mode; always returns null. */
public QueueAclsInfo[] getQueueAclsForCurrentUser() throws IOException{
return null;
}
/**
 * Main class run inside each child JVM launched by TaskRunnable. It
 * connects back to the parent over the umbilical RPC protocol, fetches its
 * task, redirects stdout/stderr to the task log directory, and runs the
 * task. Exit codes: 1 = no task / told to die, 2 = task threw an
 * exception, 3 = task threw a non-Exception throwable.
 */
public static class LocalChild {
// args: [0]=umbilical host, [1]=umbilical port, [2]=task attempt id,
// [3]=jvm id (see TaskRunnable.run for the matching argument order).
public static void main(String[] args) throws Throwable {
JobConf defaultConf = new JobConf();
String host = args[0];
int port = Integer.parseInt(args[1]);
InetSocketAddress address = new InetSocketAddress(host, port);
final TaskAttemptID firstTaskid = TaskAttemptID.forName(args[2]);
// NOTE(review): SLEEP_LONGER_COUNT appears unused in this method.
final int SLEEP_LONGER_COUNT = 5;
int jvmIdInt = Integer.parseInt(args[3]);
JVMId jvmId = new JVMId(firstTaskid.getJobID(),firstTaskid.isMap(),jvmIdInt);
TaskUmbilicalProtocol umbilical =
(TaskUmbilicalProtocol)RPC.getProxy(TaskUmbilicalProtocol.class,
TaskUmbilicalProtocol.versionID,
address,
defaultConf);
String pid = "NONE";
JvmContext context = new JvmContext(jvmId, pid);
Task task = null;
try {
// Ask the parent (Job.getTask) which task this JVM should run.
JvmTask myTask = umbilical.getTask(context);
task = myTask.getTask();
if (myTask.shouldDie() || task == null) {
LOG.error("Returning from local child");
System.exit(1);
}
JobConf job = new JobConf(task.getJobFile());
// Send stdout/stderr to per-task log files under the task log dir.
File userLogsDir = TaskLog.getBaseDir(task.getTaskID().toString());
userLogsDir.mkdirs();
System.setOut(new PrintStream(new FileOutputStream(
new File(userLogsDir, "stdout"))));
System.setErr(new PrintStream(new FileOutputStream(
new File(userLogsDir, "stderr"))));
task.setConf(job);
task.run(job, umbilical); // run the task
} catch (Exception exception) {
LOG.error("Got exception " + StringUtils.stringifyException(exception));
try {
if (task != null) {
// Report failure to the parent before exiting.
umbilical.statusUpdate(task.getTaskID(), failedStatus(task));
// do cleanup for the task
task.taskCleanup(umbilical);
}
} catch (Exception e) {
}
System.exit(2);
} catch (Throwable throwable) {
LOG.error("Got throwable " + throwable);
if (task != null) {
Throwable tCause = throwable.getCause();
String cause = tCause == null
? throwable.getMessage()
: StringUtils.stringifyException(tCause);
umbilical.fatalError(task.getTaskID(), cause);
}
System.exit(3);
} finally {
RPC.stopProxy(umbilical);
}
}
}
/** Builds a FAILED-state copy of the given task's current status. */
static TaskStatus failedStatus(Task task) {
  TaskStatus failed = (TaskStatus) task.taskStatus.clone();
  failed.setRunState(TaskStatus.State.FAILED);
  return failed;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.dataformat;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemorySegmentFactory;
import org.apache.flink.table.util.SegmentsUtil;
import static org.apache.flink.table.dataformat.BinaryRow.calculateBitSetWidthInBytes;
import static org.apache.flink.util.Preconditions.checkArgument;
/**
* Its memory storage structure and {@link BinaryRow} exactly the same, the only different is it supports
* all bytes in variable MemorySegments.
*/
public final class NestedRow extends BinaryFormat implements BaseRow {

    /** Number of fields in this row. */
    private final int arity;

    /** Width in bytes of the leading null bit set (includes the header byte). */
    private final int nullBitsSizeInBytes;

    public NestedRow(int arity) {
        checkArgument(arity >= 0);
        this.arity = arity;
        this.nullBitsSizeInBytes = calculateBitSetWidthInBytes(arity);
    }

    /**
     * Materializes a nested-row field stored at {@code offsetAndSize}.
     * The high 32 bits of {@code offsetAndSize} hold the offset relative to
     * {@code baseOffset}; the low 32 bits hold the size in bytes.
     */
    static NestedRow readNestedRowFieldFromSegments(
            MemorySegment[] segments, int numFields, int baseOffset, long offsetAndSize) {
        final int size = ((int) offsetAndSize);
        int offset = (int) (offsetAndSize >> 32);
        NestedRow row = new NestedRow(numFields);
        row.pointTo(segments, offset + baseOffset, size);
        return row;
    }

    /** Absolute offset of the 8-byte fixed-length slot of field {@code pos}. */
    private int getFieldOffset(int pos) {
        return offset + nullBitsSizeInBytes + pos * 8;
    }

    private void assertIndexIsValid(int index) {
        assert index >= 0 : "index (" + index + ") should >= 0";
        assert index < arity : "index (" + index + ") should < " + arity;
    }

    @Override
    public int getArity() {
        return arity;
    }

    @Override
    public byte getHeader() {
        return SegmentsUtil.getByte(segments, offset);
    }

    @Override
    public void setHeader(byte header) {
        SegmentsUtil.setByte(segments, offset, header);
    }

    private void setNotNullAt(int i) {
        assertIndexIsValid(i);
        // The first 8 bits of the bit set are reserved for the header byte.
        SegmentsUtil.bitUnSet(segments, offset, i + 8);
    }

    /**
     * See {@link BinaryRow#setNullAt(int)}.
     */
    @Override
    public void setNullAt(int i) {
        assertIndexIsValid(i);
        SegmentsUtil.bitSet(segments, offset, i + 8);
        // Zero the slot so hashing/equality are deterministic for nulls.
        SegmentsUtil.setLong(segments, getFieldOffset(i), 0);
    }

    @Override
    public void setInt(int pos, int value) {
        assertIndexIsValid(pos);
        setNotNullAt(pos);
        SegmentsUtil.setInt(segments, getFieldOffset(pos), value);
    }

    @Override
    public void setLong(int pos, long value) {
        assertIndexIsValid(pos);
        setNotNullAt(pos);
        SegmentsUtil.setLong(segments, getFieldOffset(pos), value);
    }

    @Override
    public void setDouble(int pos, double value) {
        assertIndexIsValid(pos);
        setNotNullAt(pos);
        SegmentsUtil.setDouble(segments, getFieldOffset(pos), value);
    }

    /**
     * Updates a decimal in place. Compact precisions are stored directly in
     * the fixed-length slot; otherwise the pre-reserved 16-byte area in the
     * variable-length part (located via the cursor stored in the slot's high
     * 32 bits) is overwritten.
     */
    @Override
    public void setDecimal(int pos, Decimal value, int precision) {
        assertIndexIsValid(pos);

        if (Decimal.isCompact(precision)) {
            // compact format
            setLong(pos, value.toUnscaledLong());
        } else {
            int fieldOffset = getFieldOffset(pos);
            int cursor = (int) (SegmentsUtil.getLong(segments, fieldOffset) >>> 32);
            assert cursor > 0 : "invalid cursor " + cursor;
            // zero-out the 16 reserved bytes before (re)writing
            SegmentsUtil.setLong(segments, offset + cursor, 0L);
            SegmentsUtil.setLong(segments, offset + cursor + 8, 0L);

            if (value == null) {
                setNullAt(pos);
                // keep the offset for future update
                SegmentsUtil.setLong(segments, fieldOffset, ((long) cursor) << 32);
            } else {
                byte[] bytes = value.toUnscaledBytes();
                assert (bytes.length <= 16);
                // Write the bytes to the variable length portion.
                SegmentsUtil.copyFromBytes(segments, offset + cursor, bytes, 0, bytes.length);
                setLong(pos, ((long) cursor << 32) | ((long) bytes.length));
            }
        }
    }

    @Override
    public void setBoolean(int pos, boolean value) {
        assertIndexIsValid(pos);
        setNotNullAt(pos);
        SegmentsUtil.setBoolean(segments, getFieldOffset(pos), value);
    }

    @Override
    public void setShort(int pos, short value) {
        assertIndexIsValid(pos);
        setNotNullAt(pos);
        SegmentsUtil.setShort(segments, getFieldOffset(pos), value);
    }

    @Override
    public void setByte(int pos, byte value) {
        assertIndexIsValid(pos);
        setNotNullAt(pos);
        SegmentsUtil.setByte(segments, getFieldOffset(pos), value);
    }

    @Override
    public void setFloat(int pos, float value) {
        assertIndexIsValid(pos);
        setNotNullAt(pos);
        SegmentsUtil.setFloat(segments, getFieldOffset(pos), value);
    }

    @Override
    public boolean isNullAt(int pos) {
        assertIndexIsValid(pos);
        return SegmentsUtil.bitGet(segments, offset, pos + 8);
    }

    @Override
    public boolean getBoolean(int pos) {
        assertIndexIsValid(pos);
        return SegmentsUtil.getBoolean(segments, getFieldOffset(pos));
    }

    @Override
    public byte getByte(int pos) {
        assertIndexIsValid(pos);
        return SegmentsUtil.getByte(segments, getFieldOffset(pos));
    }

    @Override
    public short getShort(int pos) {
        assertIndexIsValid(pos);
        return SegmentsUtil.getShort(segments, getFieldOffset(pos));
    }

    @Override
    public int getInt(int pos) {
        assertIndexIsValid(pos);
        return SegmentsUtil.getInt(segments, getFieldOffset(pos));
    }

    @Override
    public long getLong(int pos) {
        assertIndexIsValid(pos);
        return SegmentsUtil.getLong(segments, getFieldOffset(pos));
    }

    @Override
    public float getFloat(int pos) {
        assertIndexIsValid(pos);
        return SegmentsUtil.getFloat(segments, getFieldOffset(pos));
    }

    @Override
    public double getDouble(int pos) {
        assertIndexIsValid(pos);
        return SegmentsUtil.getDouble(segments, getFieldOffset(pos));
    }

    @Override
    public BinaryString getString(int pos) {
        assertIndexIsValid(pos);
        int fieldOffset = getFieldOffset(pos);
        // BUGFIX: unlike BinaryRow, a NestedRow's fixed-length part may live
        // in any of the backing segments (see class javadoc), so the slot
        // must be read via SegmentsUtil instead of segments[0] directly —
        // consistent with every other accessor in this class.
        final long offsetAndLen = SegmentsUtil.getLong(segments, fieldOffset);
        return BinaryString.readBinaryStringFieldFromSegments(segments, offset, fieldOffset, offsetAndLen);
    }

    @Override
    public Decimal getDecimal(int pos, int precision, int scale) {
        assertIndexIsValid(pos);

        if (Decimal.isCompact(precision)) {
            return Decimal.fromUnscaledLong(precision, scale,
                    SegmentsUtil.getLong(segments, getFieldOffset(pos)));
        }

        int fieldOffset = getFieldOffset(pos);
        final long offsetAndSize = SegmentsUtil.getLong(segments, fieldOffset);
        return Decimal.readDecimalFieldFromSegments(segments, offset, offsetAndSize, precision, scale);
    }

    @Override
    public <T> BinaryGeneric<T> getGeneric(int pos) {
        assertIndexIsValid(pos);
        return BinaryGeneric.readBinaryGenericFieldFromSegments(segments, offset, getLong(pos));
    }

    @Override
    public byte[] getBinary(int pos) {
        assertIndexIsValid(pos);
        int fieldOffset = getFieldOffset(pos);
        // BUGFIX: read via SegmentsUtil for the same multi-segment reason as
        // getString above.
        final long offsetAndLen = SegmentsUtil.getLong(segments, fieldOffset);
        return readBinaryFieldFromSegments(segments, offset, fieldOffset, offsetAndLen);
    }

    @Override
    public BaseRow getRow(int pos, int numFields) {
        assertIndexIsValid(pos);
        return NestedRow.readNestedRowFieldFromSegments(segments, numFields, offset, getLong(pos));
    }

    @Override
    public BaseArray getArray(int pos) {
        assertIndexIsValid(pos);
        return BinaryArray.readBinaryArrayFieldFromSegments(segments, offset, getLong(pos));
    }

    @Override
    public BaseMap getMap(int pos) {
        assertIndexIsValid(pos);
        return BinaryMap.readBinaryMapFieldFromSegments(segments, offset, getLong(pos));
    }

    /** Deep copy into a freshly allocated row. */
    public NestedRow copy() {
        return copy(new NestedRow(arity));
    }

    /** Deep copy into the given reusable row (must be a NestedRow). */
    public NestedRow copy(BaseRow reuse) {
        return copyInternal((NestedRow) reuse);
    }

    private NestedRow copyInternal(NestedRow reuse) {
        // Flatten the (possibly multi-segment) bytes into one heap segment.
        byte[] bytes = SegmentsUtil.copyToBytes(segments, offset, sizeInBytes);
        reuse.pointTo(MemorySegmentFactory.wrap(bytes), 0, sizeInBytes);
        return reuse;
    }

    @Override
    public int hashCode() {
        return SegmentsUtil.hashByWords(segments, offset, sizeInBytes);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.guacamole.auth.mysql;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import org.apache.guacamole.GuacamoleException;
import org.apache.guacamole.auth.jdbc.JDBCEnvironment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.guacamole.auth.jdbc.security.PasswordPolicy;
import org.apache.ibatis.exceptions.PersistenceException;
import org.apache.ibatis.session.SqlSession;
/**
* A MySQL-specific implementation of JDBCEnvironment provides database
* properties specifically for MySQL.
*/
public class MySQLEnvironment extends JDBCEnvironment {

    /**
     * Logger for this class.
     */
    private static final Logger logger = LoggerFactory.getLogger(MySQLEnvironment.class);

    /**
     * The earliest version of MariaDB that supported recursive CTEs.
     */
    private static final MySQLVersion MARIADB_SUPPORTS_CTE = new MySQLVersion(10, 2, 2, true);

    /**
     * The earliest version of MySQL that supported recursive CTEs.
     */
    private static final MySQLVersion MYSQL_SUPPORTS_CTE = new MySQLVersion(8, 0, 1, false);

    /**
     * The default host to connect to, if MYSQL_HOSTNAME is not specified.
     */
    private static final String DEFAULT_HOSTNAME = "localhost";

    /**
     * The default port to connect to, if MYSQL_PORT is not specified.
     */
    private static final int DEFAULT_PORT = 3306;

    /**
     * Whether a database user account is required by default for
     * authentication to succeed.
     */
    private static final boolean DEFAULT_USER_REQUIRED = false;

    /**
     * The default value for the maximum number of connections to be
     * allowed to the Guacamole server overall (0 = unlimited).
     * Declared static: these are true constants, not per-instance state.
     */
    private static final int DEFAULT_ABSOLUTE_MAX_CONNECTIONS = 0;

    /**
     * The default value for the default maximum number of connections to be
     * allowed per user to any one connection.
     */
    private static final int DEFAULT_MAX_CONNECTIONS_PER_USER = 1;

    /**
     * The default value for the default maximum number of connections to be
     * allowed per user to any one connection group.
     */
    private static final int DEFAULT_MAX_GROUP_CONNECTIONS_PER_USER = 1;

    /**
     * The default value for the default maximum number of connections to be
     * allowed to any one connection (0 = unlimited).
     */
    private static final int DEFAULT_MAX_CONNECTIONS = 0;

    /**
     * The default value for the default maximum number of connections to be
     * allowed to any one connection group (0 = unlimited).
     */
    private static final int DEFAULT_MAX_GROUP_CONNECTIONS = 0;

    /**
     * Constructs a new MySQLEnvironment, providing access to MySQL-specific
     * configuration options.
     *
     * @throws GuacamoleException
     *     If an error occurs while setting up the underlying JDBCEnvironment
     *     or while parsing legacy MySQL configuration options.
     */
    public MySQLEnvironment() throws GuacamoleException {

        // Init underlying JDBC environment
        super();

    }

    @Override
    public boolean isUserRequired() throws GuacamoleException {
        return getProperty(
            MySQLGuacamoleProperties.MYSQL_USER_REQUIRED,
            DEFAULT_USER_REQUIRED
        );
    }

    @Override
    public int getAbsoluteMaxConnections() throws GuacamoleException {
        return getProperty(MySQLGuacamoleProperties.MYSQL_ABSOLUTE_MAX_CONNECTIONS,
            DEFAULT_ABSOLUTE_MAX_CONNECTIONS
        );
    }

    @Override
    public int getDefaultMaxConnections() throws GuacamoleException {
        return getProperty(
            MySQLGuacamoleProperties.MYSQL_DEFAULT_MAX_CONNECTIONS,
            DEFAULT_MAX_CONNECTIONS
        );
    }

    @Override
    public int getDefaultMaxGroupConnections() throws GuacamoleException {
        return getProperty(
            MySQLGuacamoleProperties.MYSQL_DEFAULT_MAX_GROUP_CONNECTIONS,
            DEFAULT_MAX_GROUP_CONNECTIONS
        );
    }

    @Override
    public int getDefaultMaxConnectionsPerUser() throws GuacamoleException {
        return getProperty(
            MySQLGuacamoleProperties.MYSQL_DEFAULT_MAX_CONNECTIONS_PER_USER,
            DEFAULT_MAX_CONNECTIONS_PER_USER
        );
    }

    @Override
    public int getDefaultMaxGroupConnectionsPerUser() throws GuacamoleException {
        return getProperty(
            MySQLGuacamoleProperties.MYSQL_DEFAULT_MAX_GROUP_CONNECTIONS_PER_USER,
            DEFAULT_MAX_GROUP_CONNECTIONS_PER_USER
        );
    }

    @Override
    public PasswordPolicy getPasswordPolicy() {
        return new MySQLPasswordPolicy(this);
    }

    /**
     * Returns the hostname of the MySQL server hosting the Guacamole
     * authentication tables. If unspecified, this will be "localhost".
     *
     * @return
     *     The URL of the MySQL server.
     *
     * @throws GuacamoleException
     *     If an error occurs while retrieving the property value.
     */
    public String getMySQLHostname() throws GuacamoleException {
        return getProperty(
            MySQLGuacamoleProperties.MYSQL_HOSTNAME,
            DEFAULT_HOSTNAME
        );
    }

    /**
     * Returns the port number of the MySQL server hosting the Guacamole
     * authentication tables. If unspecified, this will be the default MySQL
     * port of 3306.
     *
     * @return
     *     The port number of the MySQL server.
     *
     * @throws GuacamoleException
     *     If an error occurs while retrieving the property value.
     */
    public int getMySQLPort() throws GuacamoleException {
        return getProperty(MySQLGuacamoleProperties.MYSQL_PORT, DEFAULT_PORT);
    }

    /**
     * Returns the name of the MySQL database containing the Guacamole
     * authentication tables.
     *
     * @return
     *     The name of the MySQL database.
     *
     * @throws GuacamoleException
     *     If an error occurs while retrieving the property value, or if the
     *     value was not set, as this property is required.
     */
    public String getMySQLDatabase() throws GuacamoleException {
        return getRequiredProperty(MySQLGuacamoleProperties.MYSQL_DATABASE);
    }

    /**
     * Returns the username that should be used when authenticating with the
     * MySQL database containing the Guacamole authentication tables.
     *
     * @return
     *     The username for the MySQL database.
     *
     * @throws GuacamoleException
     *     If an error occurs while retrieving the property value, or if the
     *     value was not set, as this property is required.
     */
    public String getMySQLUsername() throws GuacamoleException {
        return getRequiredProperty(MySQLGuacamoleProperties.MYSQL_USERNAME);
    }

    /**
     * Returns the password that should be used when authenticating with the
     * MySQL database containing the Guacamole authentication tables.
     *
     * @return
     *     The password for the MySQL database.
     *
     * @throws GuacamoleException
     *     If an error occurs while retrieving the property value, or if the
     *     value was not set, as this property is required.
     */
    public String getMySQLPassword() throws GuacamoleException {
        return getRequiredProperty(MySQLGuacamoleProperties.MYSQL_PASSWORD);
    }

    @Override
    public boolean isRecursiveQuerySupported(SqlSession session) {

        // Retrieve database version string from JDBC connection
        String versionString;
        try {
            Connection connection = session.getConnection();
            DatabaseMetaData metaData = connection.getMetaData();
            versionString = metaData.getDatabaseProductVersion();
        }
        catch (SQLException e) {
            throw new PersistenceException("Cannot determine whether "
                    + "MySQL / MariaDB supports recursive queries.", e);
        }

        try {

            // Parse MySQL / MariaDB version from version string
            MySQLVersion version = new MySQLVersion(versionString);
            logger.debug("Database recognized as {}.", version);

            // Recursive queries are supported for MariaDB 10.2.2+ and
            // MySQL 8.0.1+
            return version.isAtLeast(MARIADB_SUPPORTS_CTE)
                || version.isAtLeast(MYSQL_SUPPORTS_CTE);

        }
        catch (IllegalArgumentException e) {
            // BUGFIX: log the unparseable version string itself, not the
            // SqlSession object, which is what the message describes.
            logger.debug("Unrecognized MySQL / MariaDB version string: "
                    + "\"{}\". Assuming database engine does not support "
                    + "recursive queries.", versionString);
            return false;
        }

    }

}
| |
package com.sequenceiq.freeipa.service.freeipa.user.model;
import static java.util.Objects.requireNonNull;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import com.sequenceiq.freeipa.client.FreeIpaChecks;
import com.sequenceiq.freeipa.service.freeipa.user.UserSyncConstants;
/**
 * Immutable description of the difference between the user/group state known
 * to UMS (the source of truth) and the state currently present in FreeIPA.
 * Each collection holds the additions, removals, or updates that must be
 * applied to FreeIPA to bring it in sync with UMS. Protected IPA users and
 * groups (see {@link FreeIpaChecks}) are always excluded from the difference.
 */
public class UsersStateDifference {

    private static final Logger LOGGER = LoggerFactory.getLogger(UsersStateDifference.class);

    private final ImmutableSet<FmsGroup> groupsToAdd;

    private final ImmutableSet<FmsGroup> groupsToRemove;

    private final ImmutableSet<FmsUser> usersToAdd;

    private final ImmutableSet<String> usersWithCredentialsToUpdate;

    private final ImmutableSet<String> usersToRemove;

    private final ImmutableSet<String> usersToDisable;

    private final ImmutableSet<String> usersToEnable;

    private final ImmutableMultimap<String, String> groupMembershipToAdd;

    private final ImmutableMultimap<String, String> groupMembershipToRemove;

    @SuppressWarnings("checkstyle:ExecutableStatementCount")
    public UsersStateDifference(ImmutableSet<FmsGroup> groupsToAdd, ImmutableSet<FmsGroup> groupsToRemove,
            ImmutableSet<FmsUser> usersToAdd, ImmutableSet<String> usersWithCredentialsToUpdate, ImmutableSet<String> usersToRemove,
            ImmutableMultimap<String, String> groupMembershipToAdd, ImmutableMultimap<String, String> groupMembershipToRemove,
            ImmutableSet<String> usersToDisable, ImmutableSet<String> usersToEnable) {
        this.groupsToAdd = requireNonNull(groupsToAdd);
        this.groupsToRemove = requireNonNull(groupsToRemove);
        this.usersToAdd = requireNonNull(usersToAdd);
        this.usersWithCredentialsToUpdate = requireNonNull(usersWithCredentialsToUpdate);
        this.usersToRemove = requireNonNull(usersToRemove);
        this.groupMembershipToAdd = requireNonNull(groupMembershipToAdd);
        this.groupMembershipToRemove = requireNonNull(groupMembershipToRemove);
        this.usersToDisable = requireNonNull(usersToDisable);
        this.usersToEnable = requireNonNull(usersToEnable);
    }

    public ImmutableSet<FmsGroup> getGroupsToAdd() {
        return groupsToAdd;
    }

    public ImmutableSet<FmsGroup> getGroupsToRemove() {
        return groupsToRemove;
    }

    public ImmutableSet<FmsUser> getUsersToAdd() {
        return usersToAdd;
    }

    public ImmutableSet<String> getUsersWithCredentialsToUpdate() {
        return usersWithCredentialsToUpdate;
    }

    public ImmutableSet<String> getUsersToRemove() {
        return usersToRemove;
    }

    public ImmutableMultimap<String, String> getGroupMembershipToAdd() {
        return groupMembershipToAdd;
    }

    public ImmutableMultimap<String, String> getGroupMembershipToRemove() {
        return groupMembershipToRemove;
    }

    public ImmutableSet<String> getUsersToDisable() {
        return usersToDisable;
    }

    public ImmutableSet<String> getUsersToEnable() {
        return usersToEnable;
    }

    @Override
    public String toString() {
        return "UsersStateDifference{"
                + "groupsToAdd=" + groupsToAdd
                + ", groupsToRemove=" + groupsToRemove
                + ", usersToAdd=" + usersToAdd
                + ", usersWithCredentialsToUpdate=" + usersWithCredentialsToUpdate
                + ", usersToRemove=" + usersToRemove
                + ", groupMembershipToAdd=" + groupMembershipToAdd
                + ", groupMembershipToRemove=" + groupMembershipToRemove
                + ", usersToDisable=" + usersToDisable
                + ", usersToEnable=" + usersToEnable
                + '}';
    }

    /**
     * Computes the full difference between the UMS and IPA states.
     */
    public static UsersStateDifference fromUmsAndIpaUsersStates(UmsUsersState umsState, UsersState ipaState, UserSyncOptions options) {
        return new UsersStateDifference(
                calculateGroupsToAdd(umsState, ipaState),
                calculateGroupsToRemove(umsState, ipaState),
                calculateUsersToAdd(umsState, ipaState),
                calculateUsersWithCredentialsToUpdate(umsState, ipaState, options.isCredentialsUpdateOptimizationEnabled()),
                calculateUsersToRemove(umsState, ipaState),
                calculateGroupMembershipToAdd(umsState, ipaState),
                calculateGroupMembershipToRemove(umsState, ipaState),
                calculateUsersToDisable(umsState, ipaState),
                calculateUsersToEnable(umsState, ipaState));
    }

    /**
     * Builds a difference that removes a single deleted user and the given
     * group memberships of that user; all other collections are empty.
     */
    public static UsersStateDifference forDeletedUser(String deletedUser, Collection<String> groupMembershipsToRemove) {
        Multimap<String, String> groupMembershipsToRemoveMap = HashMultimap.create();
        groupMembershipsToRemoveMap.putAll(deletedUser, groupMembershipsToRemove);
        return new UsersStateDifference(
                ImmutableSet.of(),
                ImmutableSet.of(),
                ImmutableSet.of(),
                ImmutableSet.of(),
                ImmutableSet.of(deletedUser),
                ImmutableMultimap.of(),
                ImmutableMultimap.copyOf(groupMembershipsToRemoveMap),
                ImmutableSet.of(),
                ImmutableSet.of());
    }

    /**
     * Returns the UMS users that do not yet exist in IPA, excluding protected
     * IPA users.
     */
    public static ImmutableSet<FmsUser> calculateUsersToAdd(UmsUsersState umsState, UsersState ipaState) {
        Map<String, FmsUser> umsUsers = umsState.getUsersState().getUsers().stream()
                .collect(Collectors.toMap(FmsUser::getName, Function.identity()));
        Set<String> ipaUsers = ipaState.getUsers().stream()
                .map(FmsUser::getName)
                .collect(Collectors.toSet());
        ImmutableSet<FmsUser> usersToAdd = ImmutableSet.copyOf(Sets.difference(umsUsers.keySet(), ipaUsers)
                .stream()
                .filter(username -> !FreeIpaChecks.IPA_PROTECTED_USERS.contains(username))
                .map(username -> umsUsers.get(username))
                .collect(Collectors.toSet()));

        LOGGER.info("usersToAdd size = {}", usersToAdd.size());
        LOGGER.debug("userToAdd = {}", usersToAdd.stream().map(FmsUser::getName).collect(Collectors.toSet()));

        return usersToAdd;
    }

    /** Returns users that UMS marks DISABLED but that are not disabled in IPA. */
    public static ImmutableSet<String> calculateUsersToDisable(UmsUsersState umsState, UsersState ipaState) {
        ImmutableSet<String> usersToDisable = calculateUsersWithDifferingState(umsState, ipaState, FmsUser.State.DISABLED);

        LOGGER.info("usersToDisable size = {}", usersToDisable.size());
        LOGGER.debug("userToDisable = {}", usersToDisable);

        return usersToDisable;
    }

    /** Returns users that UMS marks ENABLED but that are not enabled in IPA. */
    public static ImmutableSet<String> calculateUsersToEnable(UmsUsersState umsState, UsersState ipaState) {
        ImmutableSet<String> usersToEnable = calculateUsersWithDifferingState(umsState, ipaState, FmsUser.State.ENABLED);

        LOGGER.info("usersToEnable size = {}", usersToEnable.size());
        LOGGER.debug("userToEnable = {}", usersToEnable);

        return usersToEnable;
    }

    // Users present in both states whose UMS state equals the requested state
    // while their IPA state differs; protected users are excluded.
    private static ImmutableSet<String> calculateUsersWithDifferingState(
            UmsUsersState umsState, UsersState ipaState, FmsUser.State state) {
        Map<String, FmsUser> existingIpaUsers = ipaState.getUsers().stream()
                .collect(Collectors.toMap(FmsUser::getName, Function.identity()));

        return ImmutableSet.copyOf(umsState.getUsersState().getUsers().stream()
                .filter(u -> u.getState() == state &&
                        existingIpaUsers.containsKey(u.getName()) &&
                        existingIpaUsers.get(u.getName()).getState() != state)
                .map(FmsUser::getName)
                .filter(username -> !FreeIpaChecks.IPA_PROTECTED_USERS.contains(username))
                .collect(Collectors.toSet()));
    }

    /**
     * Returns the users whose workload credentials must be pushed to IPA.
     * With the optimization enabled only users with stale credentials are
     * returned; otherwise all UMS users are.
     */
    public static ImmutableSet<String> calculateUsersWithCredentialsToUpdate(UmsUsersState umsState, UsersState ipaState,
            boolean credentialsUpdateOptimizationEnabled) {
        ImmutableSet<String> usersWithCredentialsToUpdate = credentialsUpdateOptimizationEnabled ?
                getUsersWithStaleCredentials(umsState, ipaState) : getAllUsers(umsState);

        LOGGER.info("usersWithCredentialsToUpdate size = {}", usersWithCredentialsToUpdate.size());
        LOGGER.debug("usersWithCredentialsToUpdate = {}", usersWithCredentialsToUpdate);

        return usersWithCredentialsToUpdate;
    }

    /**
     * Returns the members of the usersync-internal group in IPA that are no
     * longer members of that group in UMS, excluding protected users.
     */
    public static ImmutableSet<String> calculateUsersToRemove(UmsUsersState umsState, UsersState ipaState) {
        Collection<String> umsStateUsers = umsState.getUsersState().getGroupMembership().get(UserSyncConstants.CDP_USERSYNC_INTERNAL_GROUP);
        Collection<String> ipaStateUsers = ipaState.getGroupMembership().get(UserSyncConstants.CDP_USERSYNC_INTERNAL_GROUP);

        ImmutableSet<String> usersToRemove = ImmutableSet.copyOf(ipaStateUsers.stream()
                .filter(ipaUser -> !umsStateUsers.contains(ipaUser))
                .filter(ipaUser -> !FreeIpaChecks.IPA_PROTECTED_USERS.contains(ipaUser))
                .collect(Collectors.toSet()));

        LOGGER.info("usersToRemove size = {}", usersToRemove.size());
        LOGGER.debug("usersToRemove = {}", usersToRemove);

        return usersToRemove;
    }

    /** Returns the UMS groups that do not yet exist in IPA, excluding protected groups. */
    public static ImmutableSet<FmsGroup> calculateGroupsToAdd(UmsUsersState umsState, UsersState ipaState) {
        ImmutableSet<FmsGroup> groupsToAdd = ImmutableSet.copyOf(Sets.difference(umsState.getUsersState().getGroups(), ipaState.getGroups())
                .stream()
                .filter(fmsGroup -> !FreeIpaChecks.IPA_PROTECTED_GROUPS.contains(fmsGroup.getName()))
                .collect(Collectors.toSet()));

        LOGGER.info("groupsToAdd size = {}", groupsToAdd.size());
        LOGGER.debug("groupsToAdd = {}", groupsToAdd.stream().map(FmsGroup::getName).collect(Collectors.toSet()));

        return groupsToAdd;
    }

    /**
     * Returns the IPA groups that exist neither as regular UMS groups nor as
     * workload administration groups, excluding protected groups.
     */
    public static ImmutableSet<FmsGroup> calculateGroupsToRemove(UmsUsersState umsState, UsersState ipaState) {
        Set<FmsGroup> allControlPlaneGroups = Sets.union(umsState.getWorkloadAdministrationGroups(), umsState.getUsersState().getGroups());

        ImmutableSet<FmsGroup> groupsToRemove = ImmutableSet.copyOf(Sets.difference(ipaState.getGroups(), allControlPlaneGroups)
                .stream()
                .filter(fmsGroup -> !FreeIpaChecks.IPA_PROTECTED_GROUPS.contains(fmsGroup.getName()))
                .collect(Collectors.toSet()));

        LOGGER.info("groupsToRemove size = {}", groupsToRemove.size());
        LOGGER.debug("groupsToRemove = {}", groupsToRemove.stream().map(FmsGroup::getName).collect(Collectors.toSet()));

        return groupsToRemove;
    }

    /** Returns group-to-user memberships present in UMS but missing from IPA. */
    public static ImmutableMultimap<String, String> calculateGroupMembershipToAdd(UmsUsersState umsState, UsersState ipaState) {
        Multimap<String, String> groupMembershipToAdd = HashMultimap.create();
        umsState.getUsersState().getGroupMembership().forEach((group, user) -> {
            if (!FreeIpaChecks.IPA_UNMANAGED_GROUPS.contains(group) && !ipaState.getGroupMembership().containsEntry(group, user)) {
                LOGGER.debug("adding user : {} to group : {}", user, group);
                groupMembershipToAdd.put(group, user);
            }
        });

        LOGGER.info("groupMembershipToAdd size = {}", groupMembershipToAdd.size());
        LOGGER.debug("groupMembershipToAdd = {}", groupMembershipToAdd.asMap());

        return ImmutableMultimap.copyOf(groupMembershipToAdd);
    }

    /** Returns group-to-user memberships present in IPA but no longer present in UMS. */
    public static ImmutableMultimap<String, String> calculateGroupMembershipToRemove(UmsUsersState umsState, UsersState ipaState) {
        Multimap<String, String> groupMembershipToRemove = HashMultimap.create();
        ipaState.getGroupMembership().forEach((group, user) -> {
            if (!FreeIpaChecks.IPA_UNMANAGED_GROUPS.contains(group) && !umsState.getUsersState().getGroupMembership().containsEntry(group, user)) {
                // BUGFIX: message previously read "to group" for a removal.
                LOGGER.debug("removing user : {} from group : {}", user, group);
                groupMembershipToRemove.put(group, user);
            }
        });

        LOGGER.info("groupMembershipToRemove size = {}", groupMembershipToRemove.size());
        LOGGER.debug("groupMembershipToRemove = {}", groupMembershipToRemove.asMap());

        return ImmutableMultimap.copyOf(groupMembershipToRemove);
    }

    // All non-protected UMS users, used when credential-update optimization is off.
    private static ImmutableSet<String> getAllUsers(UmsUsersState umsState) {
        return umsState.getUsersState().getUsers().stream()
                .map(FmsUser::getName)
                .filter(username -> !FreeIpaChecks.IPA_PROTECTED_USERS.contains(username))
                .collect(ImmutableSet.toImmutableSet());
    }

    // Non-protected UMS users whose credentials in IPA are out of date.
    private static ImmutableSet<String> getUsersWithStaleCredentials(UmsUsersState umsState, UsersState ipaState) {
        return umsState.getUsersState().getUsers().stream()
                .map(FmsUser::getName)
                .filter(username -> !FreeIpaChecks.IPA_PROTECTED_USERS.contains(username) && credentialsAreStale(username, umsState, ipaState))
                .collect(ImmutableSet.toImmutableSet());
    }

    // A user's credentials are stale when IPA has no metadata for the user, or
    // when the credential version recorded in IPA is behind the UMS version.
    private static boolean credentialsAreStale(String username, UmsUsersState umsState, UsersState ipaState) {
        UserMetadata ipaUserMetadata = ipaState.getUserMetadataMap().get(username);
        if (ipaUserMetadata != null) {
            WorkloadCredential umsCredential = umsState.getUsersWorkloadCredentialMap().get(username);
            if (umsCredential == null) {
                // BUGFIX: previously dereferenced umsCredential unconditionally,
                // throwing NPE when UMS supplied no credential for the user.
                // With no UMS credential there is nothing to push to IPA.
                LOGGER.warn("No workload credential found in UMS state for user {}", username);
                return false;
            }
            return ipaUserMetadata.getWorkloadCredentialsVersion() < umsCredential.getVersion();
        }
        return true;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.runners.dataflow;
import static org.apache.beam.sdk.util.CoderUtils.encodeToByteArray;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
import javax.annotation.Nullable;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.DelegateCoder;
import org.apache.beam.sdk.coders.KvCoder;
import org.apache.beam.sdk.coders.VarIntCoder;
import org.apache.beam.sdk.io.UnboundedSource;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.values.KV;
import org.joda.time.Instant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * An unbounded source for testing the unbounded sources framework code.
 *
 * <p>Each split of this sources produces records of the form KV(split_id, i),
 * where i counts up from 0. Each record has a timestamp of i, and the watermark
 * accurately tracks these timestamps. The reader will occasionally return false
 * from {@code advance}, in order to simulate a source where not all the data is
 * available immediately.
 */
public class TestCountingSource
    extends UnboundedSource<KV<Integer, Integer>, TestCountingSource.CounterMark> {
  private static final Logger LOG = LoggerFactory.getLogger(TestCountingSource.class);

  // Static so finalized checkpoint values can be observed by the test harness
  // regardless of which (possibly deserialized) copy of the source finalizes.
  private static List<Integer> finalizeTracker;

  // Number of records each shard emits before advance() permanently returns false.
  private final int numMessagesPerShard;
  // Identifier of this split; emitted as the key of every record.
  private final int shardNumber;
  // Whether the reader should occasionally emit duplicate record ids.
  private final boolean dedup;
  // Whether the first call to getCheckpointMark() should throw (once, globally).
  private final boolean throwOnFirstSnapshot;
  // When false, split() always returns a single split.
  private final boolean allowSplitting;

  /**
   * We only allow an exception to be thrown from getCheckpointMark
   * at most once. This must be static since the entire TestCountingSource
   * instance may be re-serialized when the pipeline recovers and retries.
   */
  private static boolean thrown = false;

  public static void setFinalizeTracker(List<Integer> finalizeTracker) {
    TestCountingSource.finalizeTracker = finalizeTracker;
  }

  /** Creates a splittable source emitting {@code numMessagesPerShard} records per shard. */
  public TestCountingSource(int numMessagesPerShard) {
    this(numMessagesPerShard, 0, false, false, true);
  }

  /** Returns a copy of this source that requires record deduplication. */
  public TestCountingSource withDedup() {
    return new TestCountingSource(
        numMessagesPerShard, shardNumber, true, throwOnFirstSnapshot, true);
  }

  private TestCountingSource withShardNumber(int shardNumber) {
    return new TestCountingSource(
        numMessagesPerShard, shardNumber, dedup, throwOnFirstSnapshot, true);
  }

  /** Returns a copy of this source that throws on the first checkpoint attempt. */
  public TestCountingSource withThrowOnFirstSnapshot(boolean throwOnFirstSnapshot) {
    return new TestCountingSource(
        numMessagesPerShard, shardNumber, dedup, throwOnFirstSnapshot, true);
  }

  /** Returns a copy of this source that always reports exactly one split. */
  public TestCountingSource withoutSplitting() {
    return new TestCountingSource(
        numMessagesPerShard, shardNumber, dedup, throwOnFirstSnapshot, false);
  }

  private TestCountingSource(int numMessagesPerShard, int shardNumber, boolean dedup,
                             boolean throwOnFirstSnapshot, boolean allowSplitting) {
    this.numMessagesPerShard = numMessagesPerShard;
    this.shardNumber = shardNumber;
    this.dedup = dedup;
    this.throwOnFirstSnapshot = throwOnFirstSnapshot;
    this.allowSplitting = allowSplitting;
  }

  public int getShardNumber() {
    return shardNumber;
  }

  @Override
  public List<TestCountingSource> split(
      int desiredNumSplits, PipelineOptions options) {
    List<TestCountingSource> splits = new ArrayList<>();
    // Honor the requested number of splits only when splitting is allowed.
    int numSplits = allowSplitting ? desiredNumSplits : 1;
    for (int i = 0; i < numSplits; i++) {
      splits.add(withShardNumber(i));
    }
    return splits;
  }

  /** Checkpoint recording the last value emitted by a reader. */
  class CounterMark implements UnboundedSource.CheckpointMark {
    int current;

    public CounterMark(int current) {
      this.current = current;
    }

    @Override
    public void finalizeCheckpoint() {
      // Record finalizations only when a tracker has been installed by a test.
      if (finalizeTracker != null) {
        finalizeTracker.add(current);
      }
    }
  }

  @Override
  public Coder<CounterMark> getCheckpointMarkCoder() {
    // Encode the checkpoint as its single int, delegating to VarIntCoder.
    return DelegateCoder.of(
        VarIntCoder.of(),
        new DelegateCoder.CodingFunction<CounterMark, Integer>() {
          @Override
          public Integer apply(CounterMark input) {
            return input.current;
          }
        },
        new DelegateCoder.CodingFunction<Integer, CounterMark>() {
          @Override
          public CounterMark apply(Integer input) {
            return new CounterMark(input);
          }
        });
  }

  @Override
  public boolean requiresDeduping() {
    return dedup;
  }

  /**
   * Public only so that the checkpoint can be conveyed from {@link #getCheckpointMark()} to
   * {@link TestCountingSource#createReader(PipelineOptions, CounterMark)} without cast.
   */
  public class CountingSourceReader extends UnboundedReader<KV<Integer, Integer>> {
    // Last value emitted; -1 means no record has been produced yet.
    private int current;

    public CountingSourceReader(int startingPoint) {
      this.current = startingPoint;
    }

    @Override
    public boolean start() {
      return advance();
    }

    @Override
    public boolean advance() {
      if (current >= numMessagesPerShard - 1) {
        return false;
      }
      // If testing dedup, occasionally insert a duplicate value
      // (leave `current` unchanged so the same record is emitted again).
      if (current >= 0 && dedup && ThreadLocalRandom.current().nextInt(5) == 0) {
        return true;
      }
      current++;
      return true;
    }

    @Override
    public KV<Integer, Integer> getCurrent() {
      return KV.of(shardNumber, current);
    }

    @Override
    public Instant getCurrentTimestamp() {
      // Timestamps track the counter value directly.
      return new Instant(current);
    }

    @Override
    public byte[] getCurrentRecordId() {
      try {
        // Record id is the encoded record itself, so duplicates share an id.
        return encodeToByteArray(KvCoder.of(VarIntCoder.of(), VarIntCoder.of()), getCurrent());
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }

    @Override
    public void close() {}

    @Override
    public TestCountingSource getCurrentSource() {
      return TestCountingSource.this;
    }

    @Override
    public Instant getWatermark() {
      // The watermark is a promise about future elements, and the timestamps of elements are
      // strictly increasing for this source.
      return new Instant(current + 1);
    }

    @Override
    public CounterMark getCheckpointMark() {
      if (throwOnFirstSnapshot && !thrown) {
        thrown = true;
        LOG.error("Throwing exception while checkpointing counter");
        throw new RuntimeException("failed during checkpoint");
      }
      // The checkpoint can assume all records read, including the current, have
      // been committed.
      return new CounterMark(current);
    }

    @Override
    public long getSplitBacklogBytes() {
      // Arbitrary nonzero constant; tests only check that a value is reported.
      return 7L;
    }
  }

  @Override
  public CountingSourceReader createReader(
      PipelineOptions options, @Nullable CounterMark checkpointMark) {
    if (checkpointMark == null) {
      LOG.debug("creating reader");
    } else {
      LOG.debug("restoring reader from checkpoint with current = {}", checkpointMark.current);
    }
    // Start from -1 so the first advance() emits value 0.
    return new CountingSourceReader(checkpointMark != null ? checkpointMark.current : -1);
  }

  @Override
  public Coder<KV<Integer, Integer>> getOutputCoder() {
    return KvCoder.of(VarIntCoder.of(), VarIntCoder.of());
  }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.structuralsearch;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.codeInsight.template.JavaCodeContextType;
import com.intellij.codeInsight.template.TemplateContextType;
import com.intellij.dupLocator.iterators.NodeIterator;
import com.intellij.dupLocator.util.NodeFilter;
import com.intellij.ide.highlighter.JavaFileType;
import com.intellij.lang.Language;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.LanguageFileType;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.structuralsearch.impl.matcher.*;
import com.intellij.structuralsearch.impl.matcher.compiler.GlobalCompilingVisitor;
import com.intellij.structuralsearch.impl.matcher.compiler.JavaCompilingVisitor;
import com.intellij.structuralsearch.impl.matcher.compiler.PatternCompiler;
import com.intellij.structuralsearch.plugin.replace.ReplaceOptions;
import com.intellij.structuralsearch.plugin.replace.impl.ParameterInfo;
import com.intellij.structuralsearch.plugin.replace.impl.ReplacementBuilder;
import com.intellij.structuralsearch.plugin.replace.impl.ReplacementContext;
import com.intellij.structuralsearch.plugin.replace.impl.Replacer;
import com.intellij.structuralsearch.plugin.ui.Configuration;
import com.intellij.structuralsearch.plugin.ui.SearchContext;
import com.intellij.structuralsearch.plugin.ui.UIUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
* @author Eugene.Kudelevsky
*/
public class JavaStructuralSearchProfile extends StructuralSearchProfile {
  // Keywords naming Java's primitive types; used to recognize the element-type
  // keyword of primitive array creation expressions (see isLexicalNode()).
  private static final Set<String> PRIMITIVE_TYPES = new THashSet<>(Arrays.asList(
    PsiKeyword.SHORT, PsiKeyword.BOOLEAN,
    PsiKeyword.DOUBLE, PsiKeyword.LONG,
    PsiKeyword.INT, PsiKeyword.FLOAT,
    PsiKeyword.CHAR, PsiKeyword.BYTE
  ));
@Override
public String getText(PsiElement match, int start, int end) {
if (match instanceof PsiIdentifier) {
PsiElement parent = match.getParent();
if (parent instanceof PsiJavaCodeReferenceElement && !(parent instanceof PsiExpression)) {
match = parent; // care about generic
}
}
final String matchText = match.getText();
if (start==0 && end==-1) return matchText;
return matchText.substring(start,end == -1? matchText.length():end);
}
@Override
public Class getElementContextByPsi(PsiElement element) {
if (element instanceof PsiIdentifier) {
element = element.getParent();
}
if (element instanceof PsiMember) {
return PsiMember.class;
} else {
return PsiExpression.class;
}
}
  /**
   * Returns the textual name to use for a typed variable bound to the given
   * element: the declared name for named elements, the qualified name for
   * annotations, the attribute name for name-value pairs, and otherwise the
   * element text stripped of a leading '@' and any trailing semicolon.
   */
  @Override
  @NotNull
  public String getTypedVarString(final PsiElement element) {
    String text;
    if (element instanceof PsiNamedElement) {
      // May still be null (e.g. anonymous elements); handled by the fallback below.
      text = ((PsiNamedElement)element).getName();
    }
    else if (element instanceof PsiAnnotation) {
      PsiJavaCodeReferenceElement referenceElement = ((PsiAnnotation)element).getNameReferenceElement();
      text = referenceElement == null ? null : referenceElement.getQualifiedName();
    }
    else if (element instanceof PsiNameValuePair) {
      text = ((PsiNameValuePair)element).getName();
    }
    else {
      text = element.getText();
      // Strip a leading '@' (annotation-like text).
      if (StringUtil.startsWithChar(text, '@')) {
        text = text.substring(1);
      }
      // Drop a trailing semicolon; for expression statements cut at the first
      // semicolon instead (trailing comments may follow it).
      if (StringUtil.endsWithChar(text, ';')) text = text.substring(0, text.length() - 1);
      else if (element instanceof PsiExpressionStatement) {
        int i = text.indexOf(';');
        if (i != -1) text = text.substring(0, i);
      }
    }
    // Fallback: raw element text when no name could be determined above.
    if (text==null) text = element.getText();
    return text;
  }
  /**
   * Returns the text of the element relevant for matching. For a qualified
   * reference expression this is usually just the referenced name; the full
   * qualified text is kept when the reference resolves to a class, or when it
   * is unresolved but capitalized (presumably a class reference — heuristic).
   */
  @Override
  public String getMeaningfulText(PsiElement element) {
    if (element instanceof PsiReferenceExpression &&
        ((PsiReferenceExpression)element).getQualifierExpression() != null) {
      final PsiElement resolve = ((PsiReferenceExpression)element).resolve();
      // Reference to a class: keep the fully qualified text.
      if (resolve instanceof PsiClass) return element.getText();

      final PsiElement referencedElement = ((PsiReferenceExpression)element).getReferenceNameElement();
      String text = referencedElement != null ? referencedElement.getText() : "";

      // Unresolved capitalized name — likely a class; keep the full text.
      if (resolve == null && text.length() > 0 && Character.isUpperCase(text.charAt(0))) {
        return element.getText();
      }
      return text;
    }
    return super.getMeaningfulText(element);
  }
@Override
public PsiElement updateCurrentNode(PsiElement targetNode) {
if (targetNode instanceof PsiCodeBlock && ((PsiCodeBlock)targetNode).getStatements().length == 1) {
PsiElement targetNodeParent = targetNode.getParent();
if (targetNodeParent instanceof PsiBlockStatement) {
targetNodeParent = targetNodeParent.getParent();
}
if (targetNodeParent instanceof PsiIfStatement || targetNodeParent instanceof PsiLoopStatement) {
targetNode = targetNodeParent;
}
}
return targetNode;
}
@Override
public PsiElement extendMatchedByDownUp(PsiElement targetNode) {
if (targetNode instanceof PsiIdentifier) {
targetNode = targetNode.getParent();
final PsiElement parent = targetNode.getParent();
if (parent instanceof PsiTypeElement || parent instanceof PsiStatement) targetNode = parent;
}
return targetNode;
}
@Override
public PsiElement extendMatchOnePsiFile(PsiElement file) {
if (file instanceof PsiIdentifier) {
// Searching in previous results
file = file.getParent();
}
return file;
}
@NotNull
@Override
public PsiElement getPresentableElement(PsiElement element) {
if (element instanceof PsiReferenceExpression) {
final PsiElement parent = element.getParent();
if (parent instanceof PsiMethodCallExpression) {
return parent;
}
}
else if (element instanceof PsiJavaCodeReferenceElement) {
final PsiElement parent = element.getParent();
if (parent instanceof PsiTypeElement || parent instanceof PsiNewExpression || parent instanceof PsiAnnotation) {
return parent;
}
}
return element;
}
  /**
   * Compiles the pattern tree by visiting the common parent of the pattern
   * elements with a Java-specific compiling visitor.
   */
  @Override
  public void compile(PsiElement[] elements, @NotNull GlobalCompilingVisitor globalVisitor) {
    elements[0].getParent().accept(new JavaCompilingVisitor(globalVisitor));
  }
  /** Creates the visitor implementing Java-specific matching logic. */
  @Override
  @NotNull
  public PsiElementVisitor createMatchingVisitor(@NotNull GlobalMatchingVisitor globalVisitor) {
    return new JavaMatchingVisitor(globalVisitor);
  }
@NotNull
@Override
public NodeFilter getLexicalNodesFilter() {
return element -> isLexicalNode(element);
}
private static boolean isLexicalNode(PsiElement element) {
if (element instanceof PsiWhiteSpace) {
return true;
}
else if (element instanceof PsiJavaToken) {
// do not filter out type keyword of new primitive arrays (e.g. int in new int[10])
return !(element instanceof PsiKeyword &&
PRIMITIVE_TYPES.contains(element.getText()) &&
element.getParent() instanceof PsiNewExpression);
}
return false;
}
  /** Creates an empty compiled pattern using the Java-specific representation. */
  @Override
  @NotNull
  public CompiledPattern createCompiledPattern() {
    return new JavaCompiledPattern();
  }
  /** This profile handles exactly the Java language (identity comparison is intentional). */
  @Override
  public boolean isMyLanguage(@NotNull Language language) {
    return language == JavaLanguage.INSTANCE;
  }
  /** Creates the handler performing Java-specific structural replace. */
  @Override
  public StructuralReplaceHandler getReplaceHandler(@NotNull ReplacementContext context) {
    return new JavaReplaceHandler(context);
  }
  /**
   * Parses the pattern text into a non-physical PSI tree appropriate for the
   * requested context (Block, Class, Expression, or a whole dummy file).
   * Block patterns that look like mis-parsed expressions or class members are
   * transparently re-parsed in the better-fitting context.
   *
   * @throws UnsupportedOperationException if physical PSI is requested.
   */
  @NotNull
  @Override
  public PsiElement[] createPatternTree(@NotNull String text,
                                        @NotNull PatternTreeContext context,
                                        @NotNull FileType fileType,
                                        @Nullable Language language,
                                        String contextName, @Nullable String extension,
                                        @NotNull Project project,
                                        boolean physical) {
    if (physical) {
      throw new UnsupportedOperationException(getClass() + " cannot create physical PSI");
    }
    final PsiElementFactory elementFactory = JavaPsiFacade.getInstance(project).getElementFactory();
    if (context == PatternTreeContext.Block) {
      // Wrap the text in braces and parse it as a code block.
      final PsiElement element = elementFactory.createStatementFromText("{\n" + text + "\n}", null);
      final PsiElement[] children = ((PsiBlockStatement)element).getCodeBlock().getChildren();
      // Skip the synthetic '{', '}' and surrounding newlines (2 on each side).
      final int extraChildCount = 4;
      if (children.length > extraChildCount) {
        PsiElement[] result = new PsiElement[children.length - extraChildCount];
        System.arraycopy(children, 2, result, 0, children.length - extraChildCount);
        if (shouldTryExpressionPattern(result)) {
          // The block parse may have mis-read a comparison as a declaration;
          // prefer the expression parse when it yields a single element.
          try {
            final PsiElement[] expressionPattern =
              createPatternTree(text, PatternTreeContext.Expression, fileType, language, contextName, extension, project, false);
            if (expressionPattern.length == 1) {
              result = expressionPattern;
            }
          } catch (IncorrectOperationException ignore) {}
        }
        else if (shouldTryClassPattern(result)) {
          // The text may actually be a class member (method, initializer, ...).
          final PsiElement[] classPattern =
            createPatternTree(text, PatternTreeContext.Class, fileType, language, contextName, extension, project, false);
          if (classPattern.length == 1) {
            result = classPattern;
          }
        }
        return result;
      }
      else {
        return PsiElement.EMPTY_ARRAY;
      }
    }
    else if (context == PatternTreeContext.Class) {
      // Parse the text as class members and copy them into a fresh code block.
      final PsiClass clazz = elementFactory.createClassFromText(text, null);
      PsiElement startChild = clazz.getLBrace();
      if (startChild != null) startChild = startChild.getNextSibling();
      PsiElement endChild = clazz.getRBrace();
      if (endChild != null) endChild = endChild.getPrevSibling();
      if (startChild == endChild) return PsiElement.EMPTY_ARRAY; // nothing produced

      final PsiCodeBlock codeBlock = elementFactory.createCodeBlock();
      final List<PsiElement> result = new ArrayList<>(3);
      assert startChild != null;
      for (PsiElement el = startChild.getNextSibling(); el != endChild && el != null; el = el.getNextSibling()) {
        if (el instanceof PsiErrorElement) continue;
        result.add(codeBlock.add(el));
      }
      return PsiUtilCore.toPsiElementArray(result);
    }
    else if (context == PatternTreeContext.Expression) {
      // Parse a one-statement block and substitute the expression into it so the
      // result is an expression statement with proper surroundings.
      final PsiExpression expression = elementFactory.createExpressionFromText(text, null);
      final PsiBlockStatement statement = (PsiBlockStatement)elementFactory.createStatementFromText("{\na\n}", null);
      final PsiElement[] children = statement.getCodeBlock().getChildren();
      if (children.length != 5) return PsiElement.EMPTY_ARRAY;
      final PsiExpressionStatement childStatement = (PsiExpressionStatement)children[2];
      childStatement.getExpression().replace(expression);
      return new PsiElement[] { childStatement };
    }
    else {
      // File context: parse the text as a complete dummy Java file.
      return PsiFileFactory.getInstance(project).createFileFromText("__dummy.java", JavaFileType.INSTANCE, text).getChildren();
    }
  }
private static boolean shouldTryExpressionPattern(PsiElement[] elements) {
if (elements.length >= 1 && elements.length <= 3) {
final PsiElement firstElement = elements[0];
if (firstElement instanceof PsiDeclarationStatement) {
final PsiElement lastChild = firstElement.getLastChild();
if (lastChild instanceof PsiErrorElement && PsiTreeUtil.prevLeaf(lastChild) instanceof PsiErrorElement) {
// Because an identifier followed by < (less than) is parsed as the start of a declaration
// in com.intellij.lang.java.parser.StatementParser.parseStatement() line 236
// but it could just be a comparison
return true;
}
}
}
return false;
}
private static boolean shouldTryClassPattern(PsiElement[] elements) {
if (elements.length < 2) {
return false;
}
final PsiElement firstElement = elements[0];
final PsiElement secondElement = elements[1];
if (firstElement instanceof PsiDocComment) {
// might be method with javadoc
return true;
}
else if (firstElement instanceof PsiDeclarationStatement && PsiTreeUtil.lastChild(firstElement) instanceof PsiErrorElement) {
// might be method
return true;
}
else if (firstElement instanceof PsiErrorElement &&
secondElement instanceof PsiExpressionStatement &&
PsiTreeUtil.lastChild(secondElement) instanceof PsiErrorElement) {
// might be generic method
return true;
}
else if (elements.length == 3 && PsiModifier.STATIC.equals(firstElement.getText()) && secondElement instanceof PsiWhiteSpace &&
elements[2] instanceof PsiBlockStatement) {
// looks like static initializer
return true;
}
return false;
}
  /**
   * Creates the editor used for entering a search/replace template. The code
   * fragment is given the class or code block at the caret of the currently
   * selected editor as context, which provides autocompletion; highlighting is
   * disabled for the fragment.
   */
  @NotNull
  @Override
  public Editor createEditor(@NotNull SearchContext searchContext,
                             @NotNull FileType fileType,
                             Language dialect,
                             String text,
                             boolean useLastConfiguration) {
    // provides autocompletion

    PsiElement element = searchContext.getFile();

    if (element != null && !useLastConfiguration) {
      final Editor selectedEditor = FileEditorManager.getInstance(searchContext.getProject()).getSelectedTextEditor();

      if (selectedEditor != null) {
        int caretPosition = selectedEditor.getCaretModel().getOffset();
        PsiElement positionedElement = searchContext.getFile().findElementAt(caretPosition);

        // The caret may sit on a boundary; retry one character to the right.
        if (positionedElement == null) {
          positionedElement = searchContext.getFile().findElementAt(caretPosition + 1);
        }

        if (positionedElement != null) {
          element = PsiTreeUtil.getParentOfType(
            positionedElement,
            PsiClass.class, PsiCodeBlock.class
          );
        }
      }
    }

    final PsiManager psimanager = PsiManager.getInstance(searchContext.getProject());
    final Project project = psimanager.getProject();
    final PsiCodeFragment file = createCodeFragment(project, text, element);
    final Document doc = PsiDocumentManager.getInstance(searchContext.getProject()).getDocument(file);
    DaemonCodeAnalyzer.getInstance(searchContext.getProject()).setHighlightingEnabled(file, false);
    return UIUtil.createEditor(doc, searchContext.getProject(), true, true, getTemplateContextType());
  }
/** Returns the live-template context class used for Java search templates. */
@Override
public Class<? extends TemplateContextType> getTemplateContextTypeClass() {
  return JavaCodeContextType.class;
}
/**
 * Creates a code-block fragment for the given pattern text so it parses as a
 * sequence of statements resolved against the given context element.
 */
@Override
public PsiCodeFragment createCodeFragment(Project project, String text, PsiElement context) {
  return JavaCodeFragmentFactory.getInstance(project)
    .createCodeBlockCodeFragment(text, context, true);
}
/**
 * Validates a search pattern by compiling it and visiting every node with
 * {@link ValidatingVisitor}, which throws on malformed constructs.
 */
@Override
public void checkSearchPattern(Project project, MatchOptions options) {
  ValidatingVisitor visitor = new ValidatingVisitor();
  final CompiledPattern compiledPattern = PatternCompiler.compilePattern(project, options);
  final int nodeCount = compiledPattern.getNodeCount();
  final NodeIterator nodes = compiledPattern.getNodes();
  while (nodes.hasNext()) {
    final PsiElement current = nodes.current();
    // A single expression/declaration statement pattern may legitimately be
    // incomplete; tell the visitor which node that is so it can tolerate the
    // corresponding parse errors.
    visitor.setCurrent((nodeCount == 1 && (current instanceof PsiExpressionStatement|| current instanceof PsiDeclarationStatement))
                       ? current : null);
    current.accept(visitor);
    nodes.advance();
  }
  // restore the iterator for subsequent users of the compiled pattern
  nodes.reset();
}
/**
 * Validates a replacement pattern against its search pattern: the replacement
 * must be well-formed, and if neither side targets a variable, both sides must
 * agree on being an expression or a statement.
 *
 * @throws UnsupportedPatternException if one side is an expression and the other is not
 */
@Override
public void checkReplacementPattern(Project project, ReplaceOptions options) {
  MatchOptions matchOptions = options.getMatchOptions();
  FileType fileType = matchOptions.getFileType();
  PsiElement[] statements = MatcherImplUtil.createTreeFromText(
    matchOptions.getSearchPattern(),
    PatternTreeContext.Block,
    fileType,
    project
  );
  // A single statement ending in a parse error indicates a bare expression.
  final boolean searchIsExpression = statements.length == 1 && statements[0].getLastChild() instanceof PsiErrorElement;
  PsiElement[] statements2 = MatcherImplUtil.createTreeFromText(
    options.getReplacement(),
    PatternTreeContext.Block,
    fileType,
    project
  );
  final boolean replaceIsExpression = statements2.length == 1 && statements2[0].getLastChild() instanceof PsiErrorElement;
  ValidatingVisitor visitor = new ValidatingVisitor();
  for (PsiElement statement : statements2) {
    visitor.setCurrent((statements.length == 1 && (statement instanceof PsiExpressionStatement || statement instanceof PsiDeclarationStatement))
                       ? statement : null);
    statement.accept(visitor);
  }
  // Searching for something with modifiers and replacing with nothing is allowed.
  if (searchIsExpression && statements[0].getFirstChild() instanceof PsiModifierList && statements2.length == 0) {
    return;
  }
  boolean targetFound = false;
  for (final String name : matchOptions.getVariableConstraintNames()) {
    final MatchVariableConstraint constraint = matchOptions.getVariableConstraint(name);
    if (constraint.isPartOfSearchResults()) {
      targetFound = true;
      break;
    }
  }
  // Without an explicit target variable the replacement must have the same
  // "shape" (expression vs. statement) as the search template.
  if (!targetFound && searchIsExpression != replaceIsExpression) {
    throw new UnsupportedPatternException(
      searchIsExpression ? SSRBundle.message("replacement.template.is.not.expression.error.message") :
      SSRBundle.message("search.template.is.not.expression.error.message")
    );
  }
}
/**
 * Visitor that rejects malformed patterns: it validates @Modifier annotation
 * values and throws {@link MalformedPatternException} on parse errors, except
 * for a small set of errors that are expected in legitimate patterns.
 */
static class ValidatingVisitor extends JavaRecursiveElementWalkingVisitor {
  // The single top-level node (if any) whose trailing parse errors are
  // tolerated because the pattern may be a bare expression/type/annotation.
  private PsiElement myCurrent;

  @Override public void visitAnnotation(PsiAnnotation annotation) {
    final PsiJavaCodeReferenceElement nameReferenceElement = annotation.getNameReferenceElement();
    // only the special @Modifier annotation is validated here
    if (nameReferenceElement == null ||
        !nameReferenceElement.getText().equals(MatchOptions.MODIFIER_ANNOTATION_NAME)) {
      return;
    }
    for(PsiNameValuePair pair:annotation.getParameterList().getAttributes()) {
      final PsiAnnotationMemberValue value = pair.getValue();
      if (value instanceof PsiArrayInitializerMemberValue) {
        // @Modifier({"a", "b"}) — check each element of the array
        for(PsiAnnotationMemberValue v:((PsiArrayInitializerMemberValue)value).getInitializers()) {
          final String name = StringUtil.unquoteString(v.getText());
          checkModifier(name);
        }
      } else if (value != null) {
        final String name = StringUtil.unquoteString(value.getText());
        checkModifier(name);
      }
    }
  }

  /**
   * Throws if {@code name} is not a recognized modifier: the pseudo modifiers
   * "Instance" and "packageLocal", or any real Java modifier.
   */
  private static void checkModifier(final String name) {
    if (!MatchOptions.INSTANCE_MODIFIER_NAME.equals(name) &&
        !PsiModifier.PACKAGE_LOCAL.equals(name) &&
        ArrayUtil.find(JavaMatchingVisitor.MODIFIERS, name) < 0
    ) {
      throw new MalformedPatternException(SSRBundle.message("invalid.modifier.type",name));
    }
  }

  @Override
  public void visitErrorElement(PsiErrorElement element) {
    super.visitErrorElement(element);
    final PsiElement parent = element.getParent();
    final String errorDescription = element.getErrorDescription();
    // NOTE: the checks below match the parser's error messages verbatim.
    if (parent instanceof PsiClass && "Identifier expected".equals(errorDescription)) {
      // other class content variable.
      return;
    }
    if (parent instanceof PsiTryStatement && "'catch' or 'finally' expected".equals(errorDescription)) {
      // searching for naked try allowed
      return;
    }
    if (parent == myCurrent) {
      // search for expression, type, annotation or symbol
      if ("';' expected".equals(errorDescription)) {
        // expression
        return;
      }
      if ("Identifier or type expected".equals(errorDescription)) {
        // annotation
        return;
      }
      if ("Identifier expected".equals(errorDescription)) {
        // type
        return;
      }
    }
    throw new MalformedPatternException(errorDescription);
  }

  /** Sets the node whose trailing parse errors should be tolerated (may be null). */
  void setCurrent(PsiElement current) {
    myCurrent = current;
  }
}
/** Java search templates default to the Java file type. */
@Override
public LanguageFileType getDefaultFileType(LanguageFileType currentDefaultFileType) {
  return StdFileTypes.JAVA;
}
/** Returns the built-in Java search templates shipped with the plugin. */
@Override
public Configuration[] getPredefinedTemplates() {
  return JavaPredefinedConfigurations.createPredefinedTemplates();
}
/**
 * Marks typed variables that form a method parameter (type + name pair) so
 * substitution can rebuild them together, rather than as independent
 * argument-context variables.
 */
@Override
public void provideAdditionalReplaceOptions(@NotNull PsiElement node, final ReplaceOptions options, final ReplacementBuilder builder) {
  node.accept(new JavaRecursiveElementWalkingVisitor() {
    @Override
    public void visitReferenceExpression(PsiReferenceExpression expression) {
      // treat reference expressions like plain elements (skip reference-specific handling)
      visitElement(expression);
    }

    @Override
    public void visitParameter(PsiParameter parameter) {
      super.visitParameter(parameter);
      String name = parameter.getName();
      String type = parameter.getType().getCanonicalText();
      if (StructuralSearchUtil.isTypedVariable(name)) {
        name = Replacer.stripTypedVariableDecoration(name);
        if (StructuralSearchUtil.isTypedVariable(type)) {
          type = Replacer.stripTypedVariableDecoration(type);
        }
        ParameterInfo nameInfo = builder.findParameterization(name);
        ParameterInfo typeInfo = builder.findParameterization(type);
        final PsiElement scope = parameter.getDeclarationScope();
        // catch sections and foreach statements hold parameters too, but
        // those are not method parameters and keep default handling
        if (nameInfo != null && typeInfo != null && !(scope instanceof PsiCatchSection) && !(scope instanceof PsiForeachStatement)) {
          nameInfo.setArgumentContext(false);
          typeInfo.setArgumentContext(false);
          typeInfo.setMethodParameterContext(true);
          nameInfo.setMethodParameterContext(true);
          typeInfo.setElement(parameter.getTypeElement());
        }
      }
    }
  });
}
/**
 * Substitutes a matched variable into the replacement text.
 * Compound (multi-element) matches are stitched together with a separator
 * chosen from the matched elements' context (comma, newline, operator, space);
 * single matches are inserted as-is, with trailing semicolons stripped for
 * non-field elements.
 *
 * @param info     the parameterization of the variable being replaced
 * @param match    the match result for this variable
 * @param result   the replacement text being built (modified in place)
 * @param offset   current offset correction accumulated by earlier substitutions
 * @param matchMap all named match results, keyed by variable name
 * @return the updated offset correction
 */
@Override
public int handleSubstitution(final ParameterInfo info,
                              MatchResult match,
                              StringBuilder result,
                              int offset,
                              HashMap<String, MatchResult> matchMap) {
  if (info.getName().equals(match.getName())) {
    final String replacementString;
    boolean forceAddingNewLine = false;
    if (info.isMethodParameterContext()) {
      // method parameters are rebuilt as "type name" pairs from the match map
      final StringBuilder buf = new StringBuilder();
      handleMethodParameter(buf, info, matchMap);
      replacementString = buf.toString();
    }
    else if (match.hasSons() && !match.isScopeMatch()) {
      // compound matches
      final StringBuilder buf = new StringBuilder();
      MatchResult previous = null;
      boolean stripSemicolon = false;
      for (final MatchResult matchResult : match.getAllSons()) {
        final PsiElement currentElement = matchResult.getMatch();
        stripSemicolon = !(currentElement instanceof PsiField);
        if (previous != null) {
          // pick the separator appropriate for the matched element's context
          final PsiElement parent = currentElement.getParent();
          if (parent instanceof PsiVariable) {
            // keep the comma between variables declared in one statement
            final PsiElement prevSibling = PsiTreeUtil.skipWhitespacesBackward(parent);
            if (PsiUtil.isJavaToken(prevSibling, JavaTokenType.COMMA)) {
              buf.append(',');
            }
          }
          else if (info.isStatementContext()) {
            final PsiElement prevSibling = currentElement.getPrevSibling();
            if (prevSibling instanceof PsiWhiteSpace && prevSibling.getPrevSibling() == previous.getMatch()) {
              // sequential statements matched so preserve whitespace
              buf.append(prevSibling.getText());
            }
            else {
              buf.append('\n');
            }
          }
          else if (info.isArgumentContext()) {
            buf.append(',');
          }
          else if (parent instanceof PsiClass) {
            // class members separated by a comma keep it; otherwise a newline
            final PsiElement prevSibling = PsiTreeUtil.skipWhitespacesBackward(currentElement);
            if (PsiUtil.isJavaToken(prevSibling, JavaTokenType.COMMA)) {
              buf.append(',');
            }
            else {
              buf.append('\n');
            }
          }
          else if (parent instanceof PsiReferenceList) {
            buf.append(',');
          }
          else if (parent instanceof PsiPolyadicExpression) {
            // reuse the operator token that separated the original operands
            final PsiPolyadicExpression expression = (PsiPolyadicExpression)parent;
            final PsiJavaToken token = expression.getTokenBeforeOperand(expression.getOperands()[1]);
            if (token != null) {
              buf.append(token.getText());
            }
          }
          else {
            buf.append(' ');
          }
        }
        buf.append(matchResult.getMatchImage());
        forceAddingNewLine = currentElement instanceof PsiComment;
        previous = matchResult;
      }
      replacementString = stripSemicolon ? StringUtil.trimEnd(buf.toString(), ';') : buf.toString();
    } else {
      final PsiElement matchElement = match.getMatch();
      if (info.isStatementContext()) {
        // a trailing comment needs a newline after it so following text
        // is not swallowed by the comment
        forceAddingNewLine = matchElement instanceof PsiComment;
      }
      final String matchImage = match.getMatchImage();
      replacementString = !(matchElement instanceof PsiField) ? StringUtil.trimEnd(matchImage, ';') : matchImage;
    }
    offset = Replacer.insertSubstitution(result, offset, info, replacementString);
    offset = removeExtraSemicolon(info, offset, result, match);
    if (forceAddingNewLine && info.isStatementContext()) {
      result.insert(info.getStartIndex() + offset + 1, '\n');
      offset++;
    }
  }
  return offset;
}
/**
 * Handles a variable that matched nothing: removes the now-dangling separator
 * token (comma, operator, '=', ...) adjacent to the variable's position in the
 * replacement text, or a superfluous semicolon.
 *
 * @param info   the parameterization of the unmatched variable
 * @param offset current offset correction
 * @param result the replacement text being built (modified in place)
 * @return the updated offset correction
 */
@Override
public int handleNoSubstitution(ParameterInfo info, int offset, StringBuilder result) {
  final PsiElement element = info.getElement();
  // prefer deleting the separator before the variable
  final PsiElement prevSibling = PsiTreeUtil.skipWhitespacesBackward(element);
  if (prevSibling instanceof PsiJavaToken && isRemovableToken(prevSibling)) {
    final int start = info.getBeforeDelimiterPos() + offset - (prevSibling.getTextLength() - 1);
    final int end = info.getStartIndex() + offset;
    result.delete(start, end);
    return offset - (end - start);
  }
  // otherwise delete the separator after it
  final PsiElement nextSibling = PsiTreeUtil.skipWhitespacesForward(element);
  if (nextSibling instanceof PsiJavaToken && isRemovableToken(nextSibling)) {
    final int start = info.getStartIndex() + offset;
    final int end = info.getAfterDelimiterPos() + nextSibling.getTextLength() + offset;
    result.delete(start, end);
    return offset - 1;
  }
  // semicolons inside a for statement header must be kept
  if (element == null || !(element.getParent() instanceof PsiForStatement)) {
    return removeExtraSemicolon(info, offset, result, null);
  }
  return offset;
}
/**
 * Tells whether a token left dangling by an empty substitution may be deleted.
 * Only tokens inside list-like or expression parents qualify, and single
 * bracket/brace characters are never removable.
 */
private static boolean isRemovableToken(PsiElement element) {
  final PsiElement parent = element.getParent();
  final boolean separatorContext =
    parent instanceof PsiAnnotationParameterList || // ',' between annotation parameters
    parent instanceof PsiAssertStatement ||         // ':' before assertion message
    parent instanceof PsiExpressionList ||          // ',' between expressions
    parent instanceof PsiParameterList ||           // ',' between parameters
    parent instanceof PsiPolyadicExpression ||      // '+', '*', '&&' etcetera
    parent instanceof PsiReferenceList ||           // ','
    parent instanceof PsiReferenceParameterList ||  // ','
    parent instanceof PsiResourceList ||            // ';'
    parent instanceof PsiTypeParameterList ||       // ','
    parent instanceof PsiVariable;                  // '=' before initializer
  if (!separatorContext) {
    return false;
  }
  final String text = element.getText();
  // multi-character tokens are always removable; single bracket characters never are
  return text.length() != 1 || "<>(){}[]".indexOf(text.charAt(0)) < 0;
}
/** An element counts as an identifier exactly when it is a {@link PsiIdentifier}. */
@Override
public boolean isIdentifier(PsiElement element) {
  return element instanceof PsiIdentifier;
}
/** "packageLocal" is reserved so it can be used as a pseudo modifier in patterns. */
@NotNull
@Override
public Collection<String> getReservedWords() {
  return Collections.singleton(PsiModifier.PACKAGE_LOCAL);
}
/** Only class members can own a javadoc comment. */
@Override
public boolean isDocCommentOwner(PsiElement match) {
  return match instanceof PsiMember;
}
/**
 * Rebuilds a matched method parameter (or parameter list) as "type name"
 * pairs into {@code buf}, looking the match up by the parameter's variable name.
 * Does nothing when {@code info} points at something other than the type
 * element, or when the name has no match.
 */
private static void handleMethodParameter(StringBuilder buf, ParameterInfo info, HashMap<String, MatchResult> matchMap) {
  if(!(info.getElement() instanceof PsiTypeElement)) {
    // no specific handling for name of method parameter since it is handled with type
    return;
  }
  // the type element's parent is the PsiParameter; its name keys the match map
  String name = ((PsiParameter)info.getElement().getParent()).getName();
  name = StructuralSearchUtil.isTypedVariable(name) ? Replacer.stripTypedVariableDecoration(name):name;
  final MatchResult matchResult = matchMap.get(name);
  if (matchResult == null) return;
  if (matchResult.isMultipleMatch()) {
    // several parameters matched: emit them comma separated
    for (MatchResult result : matchResult.getAllSons()) {
      if (buf.length() > 0) {
        buf.append(',');
      }
      appendParameter(buf, result);
    }
  } else {
    appendParameter(buf, matchResult);
  }
}
/**
 * Appends one parameter as "type name": the single son holds the type image,
 * the result itself holds the name image.
 */
private static void appendParameter(final StringBuilder buf, final MatchResult matchResult) {
  final List<MatchResult> sons = matchResult.getAllSons();
  assert sons.size() == 1;
  buf.append(sons.get(0).getMatchImage()).append(' ').append(matchResult.getMatchImage());
}
/**
 * Deletes the semicolon following a statement-context substitution when it is
 * superfluous: after a no-match, after a '}' that is not an array initializer,
 * or after a substitution that ends in a comment (where the ';' would end up
 * inside the comment).
 *
 * @param match the match result, or null when the variable matched nothing
 * @return the updated offset correction
 */
private static int removeExtraSemicolon(ParameterInfo info, int offset, StringBuilder result, MatchResult match) {
  if (info.isStatementContext()) {
    final int index = offset + info.getStartIndex();
    if (result.charAt(index)==';' &&
        ( match == null ||
          ( result.charAt(index-1)=='}' &&
            !(match.getMatch() instanceof PsiDeclarationStatement) && // array init in dcl
            !(match.getMatch() instanceof PsiNewExpression) // array initializer
          ) ||
          ( !match.isMultipleMatch() && // ; in comment
            match.getMatch() instanceof PsiComment
          ) ||
          ( match.isMultipleMatch() && // ; in comment
            match.getAllSons().get( match.getAllSons().size() - 1 ).getMatch() instanceof PsiComment
          )
        )
    ) {
      result.deleteCharAt(index);
      --offset;
    }
  }
  return offset;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.s3a.s3guard;
import java.util.Collection;
import java.util.Iterator;
import com.amazonaws.services.dynamodbv2.document.Item;
import com.amazonaws.services.dynamodbv2.document.ItemCollection;
import com.amazonaws.services.dynamodbv2.document.QueryOutcome;
import com.amazonaws.services.dynamodbv2.document.ScanOutcome;
import com.amazonaws.services.dynamodbv2.document.Table;
import com.amazonaws.services.dynamodbv2.document.internal.IteratorSupport;
import com.amazonaws.services.dynamodbv2.document.spec.QuerySpec;
import com.amazonaws.services.dynamodbv2.xspec.ExpressionSpecBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.s3a.Retries;
import org.apache.hadoop.fs.s3a.S3AFileStatus;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.apache.hadoop.fs.s3a.s3guard.DynamoDBMetadataStore.VERSION_MARKER_ITEM_NAME;
import static org.apache.hadoop.fs.s3a.s3guard.PathMetadataDynamoDBTranslation.CHILD;
import static org.apache.hadoop.fs.s3a.s3guard.PathMetadataDynamoDBTranslation.PARENT;
import static org.apache.hadoop.fs.s3a.s3guard.PathMetadataDynamoDBTranslation.TABLE_VERSION;
import static org.apache.hadoop.fs.s3a.s3guard.PathMetadataDynamoDBTranslation.itemToPathMetadata;
import static org.apache.hadoop.fs.s3a.s3guard.PathMetadataDynamoDBTranslation.pathToKey;
/**
* Package-scoped accessor to table state in S3Guard.
* This is for maintenance, diagnostics and testing: it is <i>not</i> to
* be used otherwise.
* <ol>
* <li>
* Some of the operations here may dramatically alter the state of
* a table, so use carefully.
* </li>
* <li>
* Operations to assess consistency of a store are best executed
* against a table which is otherwise inactive.
* </li>
* <li>
* No retry/throttling or AWS to IOE logic here.
* </li>
* <li>
* If a scan or query includes the version marker in the result, it
* is converted to a {@link VersionMarker} instance.
* </li>
* </ol>
*
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
@Retries.OnceRaw
class S3GuardTableAccess {

  private static final Logger LOG =
      LoggerFactory.getLogger(S3GuardTableAccess.class);

  /**
   * Store instance to work with.
   */
  private final DynamoDBMetadataStore store;

  /**
   * Table; retrieved from the store.
   */
  private final Table table;

  /**
   * Construct.
   * @param store store to work with; must be non-null and have a table.
   */
  S3GuardTableAccess(final DynamoDBMetadataStore store) {
    this.store = checkNotNull(store);
    this.table = checkNotNull(store.getTable());
  }

  /**
   * Username of user in store.
   * @return a string.
   */
  private String getUsername() {
    return store.getUsername();
  }

  /**
   * Execute a query.
   * No retry logic: failures surface as raw AWS exceptions.
   * @param spec query spec.
   * @return the outcome.
   */
  @Retries.OnceRaw
  ItemCollection<QueryOutcome> query(QuerySpec spec) {
    return table.query(spec);
  }

  /**
   * Issue a query where the result is to be an iterator over
   * the entries
   * of DDBPathMetadata instances.
   * @param spec query spec.
   * @return an iterator over path entries.
   */
  @Retries.OnceRaw
  Iterable<DDBPathMetadata> queryMetadata(QuerySpec spec) {
    return new DDBPathMetadataCollection<>(query(spec));
  }

  /**
   * Execute a full-table scan.
   * @param spec expression defining the scan filter/projection.
   * @return the outcome.
   */
  @Retries.OnceRaw
  ItemCollection<ScanOutcome> scan(ExpressionSpecBuilder spec) {
    return table.scan(spec.buildForScan());
  }

  /**
   * Scan the table, converting each returned item to a DDBPathMetadata entry.
   * @param spec expression defining the scan.
   * @return an iterator over path entries.
   */
  @Retries.OnceRaw
  Iterable<DDBPathMetadata> scanMetadata(ExpressionSpecBuilder spec) {
    return new DDBPathMetadataCollection<>(scan(spec));
  }

  /**
   * Delete the items for the given paths, one delete call per path.
   * @param paths paths whose entries are to be deleted.
   */
  @Retries.OnceRaw
  void delete(Collection<Path> paths) {
    paths.stream()
        .map(PathMetadataDynamoDBTranslation::pathToKey)
        .forEach(table::deleteItem);
  }

  /**
   * Delete the item for a single path.
   * @param path path whose entry is to be deleted.
   */
  @Retries.OnceRaw
  void delete(Path path) {
    table.deleteItem(pathToKey(path));
  }

  /**
   * A collection which wraps the result of a query or scan.
   * Important: iterate through this only once; the outcome
   * of repeating an iteration is "undefined"
   * @param <T> type of outcome.
   */
  private final class DDBPathMetadataCollection<T>
      implements Iterable<DDBPathMetadata> {

    /**
     * Query/scan result.
     */
    private final ItemCollection<T> outcome;

    /**
     * Instantiate.
     * @param outcome query/scan outcome.
     */
    private DDBPathMetadataCollection(final ItemCollection<T> outcome) {
      this.outcome = outcome;
    }

    /**
     * Get the iterator.
     * @return the iterator.
     */
    @Override
    public Iterator<DDBPathMetadata> iterator() {
      return new DDBPathMetadataIterator<>(outcome.iterator());
    }
  }

  /**
   * An iterator which converts the iterated-over result of
   * a query or scan into a {@code DDBPathMetadataIterator} entry.
   * @param <T> type of source.
   */
  private final class DDBPathMetadataIterator<T> implements
      Iterator<DDBPathMetadata> {

    /**
     * Iterator to invoke.
     */
    private final IteratorSupport<Item, T> it;

    /**
     * Instantiate.
     * @param it Iterator to invoke.
     */
    private DDBPathMetadataIterator(final IteratorSupport<Item, T> it) {
      this.it = it;
    }

    @Override
    @Retries.OnceRaw
    public boolean hasNext() {
      return it.hasNext();
    }

    @Override
    @Retries.OnceRaw
    public DDBPathMetadata next() {
      Item item = it.next();
      Pair<String, String> key = primaryKey(item);
      if (VERSION_MARKER_ITEM_NAME.equals(key.getLeft()) &&
          VERSION_MARKER_ITEM_NAME.equals(key.getRight())) {
        // a version marker is found, return the special type
        return new VersionMarker(item);
      } else {
        return itemToPathMetadata(item, getUsername());
      }
    }
  }

  /**
   * DDBPathMetadata subclass returned when a query returns
   * the version marker.
   * There is a FileStatus returned where the owner field contains
   * the table version; the path is always the unqualified path "/VERSION".
   * Because it is unqualified, operations which treat this as a normal
   * DDB metadata entry usually fail.
   */
  static final class VersionMarker extends DDBPathMetadata {

    /**
     * Instantiate.
     * @param versionMarker the version marker.
     */
    VersionMarker(Item versionMarker) {
      super(new S3AFileStatus(true, new Path("/VERSION"),
          "" + versionMarker.getString(TABLE_VERSION)));
    }
  }

  /**
   * Given an item, split it to the parent and child fields.
   * @param item item to split.
   * @return (parent, child).
   */
  private static Pair<String, String> primaryKey(Item item) {
    return Pair.of(item.getString(PARENT), item.getString(CHILD));
  }
}
| |
package org.daisy.pipeline.braille.dotify.impl;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.NoSuchElementException;
import com.google.common.base.MoreObjects;
import com.google.common.base.MoreObjects.ToStringHelper;
import com.google.common.collect.ImmutableList;
import static com.google.common.collect.Iterables.size;
import cz.vutbr.web.css.CSSProperty;
import org.daisy.braille.css.BrailleCSSProperty.Hyphens;
import org.daisy.braille.css.SimpleInlineStyle;
import org.daisy.dotify.api.translator.BrailleFilter;
import org.daisy.dotify.api.translator.BrailleFilterFactoryService;
import org.daisy.dotify.api.translator.BrailleTranslatorFactory;
import org.daisy.dotify.api.translator.Translatable;
import org.daisy.dotify.api.translator.TranslationException;
import org.daisy.dotify.api.translator.TranslatorConfigurationException;
import org.daisy.dotify.api.translator.TranslatorMode;
import org.daisy.dotify.api.translator.TranslatorType;
import org.daisy.pipeline.braille.common.AbstractBrailleTranslator;
import org.daisy.pipeline.braille.common.AbstractBrailleTranslator.util.DefaultLineBreaker;
import org.daisy.pipeline.braille.common.AbstractTransformProvider;
import org.daisy.pipeline.braille.common.AbstractTransformProvider.util.Function;
import org.daisy.pipeline.braille.common.AbstractTransformProvider.util.Iterables;
import static org.daisy.pipeline.braille.common.AbstractTransformProvider.util.Iterables.concat;
import static org.daisy.pipeline.braille.common.AbstractTransformProvider.util.logCreate;
import static org.daisy.pipeline.braille.common.AbstractTransformProvider.util.logSelect;
import org.daisy.pipeline.braille.common.BrailleTranslatorProvider;
import org.daisy.pipeline.braille.common.CSSStyledText;
import org.daisy.pipeline.braille.common.Hyphenator;
import org.daisy.pipeline.braille.common.HyphenatorProvider;
import org.daisy.pipeline.braille.common.Query;
import org.daisy.pipeline.braille.common.Query.Feature;
import org.daisy.pipeline.braille.common.Query.MutableQuery;
import static org.daisy.pipeline.braille.common.Query.util.mutableQuery;
import org.daisy.pipeline.braille.common.TransformProvider;
import static org.daisy.pipeline.braille.common.TransformProvider.util.dispatch;
import static org.daisy.pipeline.braille.common.TransformProvider.util.memoize;
import static org.daisy.pipeline.braille.common.TransformProvider.util.varyLocale;
import static org.daisy.pipeline.braille.common.util.Locales.parseLocale;
import static org.daisy.pipeline.braille.common.util.Strings.join;
import org.daisy.pipeline.braille.dotify.DotifyTranslator;
import org.osgi.framework.FrameworkUtil;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.annotations.ReferenceCardinality;
import org.osgi.service.component.annotations.ReferencePolicy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @see <a href="../../../../../../../../../doc/">User documentation</a>.
*/
public class DotifyTranslatorImpl extends AbstractBrailleTranslator implements DotifyTranslator {
private final BrailleFilter filter;
private final boolean hyphenating;
private final Hyphenator externalHyphenator;
protected DotifyTranslatorImpl(BrailleFilter filter, boolean hyphenating) {
this.filter = filter;
this.hyphenating = hyphenating;
this.externalHyphenator = null;
}
protected DotifyTranslatorImpl(BrailleFilter filter, Hyphenator externalHyphenator) {
this.filter = filter;
this.hyphenating = true;
this.externalHyphenator = externalHyphenator;
}
public BrailleFilter asBrailleFilter() {
return filter;
}
@Override
public FromStyledTextToBraille fromStyledTextToBraille() {
return fromStyledTextToBraille;
}
private final FromStyledTextToBraille fromStyledTextToBraille = new FromStyledTextToBraille() {
public java.lang.Iterable<String> transform(java.lang.Iterable<CSSStyledText> styledText, int from, int to) {
int size = size(styledText);
if (to < 0) to = size;
String[] braille = new String[to - from];
int i = 0;
for (CSSStyledText t : styledText) {
if (i >= from && i < to)
braille[i - from] = DotifyTranslatorImpl.this.transform(t.getText(), t.getStyle());
i++; }
return Arrays.asList(braille);
}
};
@Override
public LineBreakingFromStyledText lineBreakingFromStyledText() {
return lineBreakingFromStyledText;
}
private final LineBreakingFromStyledText lineBreakingFromStyledText
= new DefaultLineBreaker() {
protected BrailleStream translateAndHyphenate(final java.lang.Iterable<CSSStyledText> styledText, int from, int to) {
return new FullyHyphenatedAndTranslatedString(join(fromStyledTextToBraille.transform(styledText)), from, to);
}
};
private String transform(String text, boolean hyphenate) {
if (hyphenate && !hyphenating)
throw new RuntimeException("'hyphens:auto' is not supported");
try {
if (hyphenate && externalHyphenator != null)
return filter.filter(Translatable.text(externalHyphenator.asFullHyphenator().transform(text)).hyphenate(false).build());
else
return filter.filter(Translatable.text(text).hyphenate(hyphenate).build()); }
catch (TranslationException e) {
throw new RuntimeException(e); }
}
public String transform(String text, SimpleInlineStyle style) {
boolean hyphenate = false;
if (style != null) {
CSSProperty val = style.getProperty("hyphens");
if (val != null) {
if (val == Hyphens.AUTO)
hyphenate = true;
else if (val == Hyphens.MANUAL)
logger.warn("hyphens:{} not supported", val);
style.removeProperty("hyphens"); }
for (String prop : style.getPropertyNames())
logger.warn("CSS property {} not supported", style.getSourceDeclaration(prop)); }
return transform(text, hyphenate);
}
@Component(
name = "org.daisy.pipeline.braille.dotify.DotifyTranslatorImpl.Provider",
service = {
DotifyTranslator.Provider.class,
BrailleTranslatorProvider.class,
TransformProvider.class
}
)
public static class Provider extends AbstractTransformProvider<DotifyTranslator>
implements DotifyTranslator.Provider {
public Iterable<DotifyTranslator> _get(Query query) {
MutableQuery q = mutableQuery(query);
for (Feature f : q.removeAll("input"))
if (!supportedInput.contains(f.getValue().get()))
return empty;
for (Feature f : q.removeAll("output"))
if (!supportedOutput.contains(f.getValue().get()))
return empty;
if (q.containsKey("translator"))
if (!"dotify".equals(q.removeOnly("translator").getValue().get()))
return empty;
return logSelect(q, _provider);
}
private final static Iterable<DotifyTranslator> empty = Iterables.<DotifyTranslator>empty();
// "text-css" not supported: CSS styles not recognized and line breaking and white space
// processing not according to CSS
private final static List<String> supportedInput = Collections.emptyList();
private final static List<String> supportedOutput = ImmutableList.of("braille");
private TransformProvider<DotifyTranslator> _provider
= varyLocale(
new AbstractTransformProvider<DotifyTranslator>() {
public Iterable<DotifyTranslator> _get(Query query) {
MutableQuery q = mutableQuery(query);
if (q.containsKey("locale")) {
final String locale; {
try {
locale = parseLocale(q.removeOnly("locale").getValue().get()).toLanguageTag(); }
catch (IllegalArgumentException e) {
logger.error("Invalid locale", e);
return empty; }
}
final String mode = TranslatorMode.Builder.withType(TranslatorType.UNCONTRACTED).build().toString();
String v = null;
if (q.containsKey("hyphenator"))
v = q.removeOnly("hyphenator").getValue().get();
else
v = "auto";
final String hyphenator = v;
if (!q.isEmpty()) {
logger.warn("Unsupported feature '"+ q.iterator().next().getKey() + "'");
return empty; }
Iterable<BrailleFilter> filters = Iterables.transform(
factoryServices,
new Function<BrailleFilterFactoryService,BrailleFilter>() {
public BrailleFilter _apply(BrailleFilterFactoryService service) {
try {
if (service.supportsSpecification(locale, mode))
return service.newFactory().newFilter(locale, mode); }
catch (TranslatorConfigurationException e) {
logger.error("Could not create BrailleFilter for locale " + locale + " and mode " + mode, e); }
throw new NoSuchElementException(); }});
return concat(
Iterables.transform(
filters,
new Function<BrailleFilter,Iterable<DotifyTranslator>>() {
public Iterable<DotifyTranslator> _apply(final BrailleFilter filter) {
Iterable<DotifyTranslator> translators = empty;
if (!"none".equals(hyphenator)) {
MutableQuery hyphenatorQuery = mutableQuery();
if (!"auto".equals(hyphenator))
hyphenatorQuery.add("hyphenator", hyphenator);
hyphenatorQuery.add("locale", locale);
Iterable<Hyphenator> hyphenators = logSelect(hyphenatorQuery, hyphenatorProvider);
translators = Iterables.transform(
hyphenators,
new Function<Hyphenator,DotifyTranslator>() {
public DotifyTranslator _apply(Hyphenator hyphenator) {
return __apply(
logCreate(
(DotifyTranslator)new DotifyTranslatorImpl(filter, hyphenator))); }}); }
if ("auto".equals(hyphenator))
translators = concat(
translators,
Iterables.of(
logCreate((DotifyTranslator)new DotifyTranslatorImpl(filter, true))));
if ("none".equals(hyphenator))
translators = concat(
translators,
Iterables.of(
logCreate((DotifyTranslator)new DotifyTranslatorImpl(filter, false))));
return translators;
}
}
)
);
}
return empty;
}
}
);
private final List<BrailleFilterFactoryService> factoryServices = new ArrayList<BrailleFilterFactoryService>();
@Reference(
name = "BrailleFilterFactoryService",
unbind = "unbindBrailleFilterFactoryService",
service = BrailleFilterFactoryService.class,
cardinality = ReferenceCardinality.MULTIPLE,
policy = ReferencePolicy.DYNAMIC
)
protected void bindBrailleFilterFactoryService(BrailleFilterFactoryService service) {
if (!OSGiHelper.inOSGiContext())
service.setCreatedWithSPI();
factoryServices.add(service);
invalidateCache();
}
protected void unbindBrailleFilterFactoryService(BrailleFilterFactoryService service) {
factoryServices.remove(service);
invalidateCache();
}
@Reference(
name = "HyphenatorProvider",
unbind = "unbindHyphenatorProvider",
service = HyphenatorProvider.class,
cardinality = ReferenceCardinality.MULTIPLE,
policy = ReferencePolicy.DYNAMIC
)
@SuppressWarnings(
"unchecked" // safe cast to TransformProvider<Hyphenator>
)
protected void bindHyphenatorProvider(HyphenatorProvider<?> provider) {
hyphenatorProviders.add((TransformProvider<Hyphenator>)provider);
hyphenatorProvider.invalidateCache();
logger.debug("Adding Hyphenator provider: " + provider);
}
protected void unbindHyphenatorProvider(HyphenatorProvider<?> provider) {
hyphenatorProviders.remove(provider);
hyphenatorProvider.invalidateCache();
logger.debug("Removing Hyphenator provider: " + provider);
}
private List<TransformProvider<Hyphenator>> hyphenatorProviders
= new ArrayList<TransformProvider<Hyphenator>>();
private TransformProvider.util.MemoizingProvider<Hyphenator> hyphenatorProvider
= memoize(dispatch(hyphenatorProviders));
@Override
public ToStringHelper toStringHelper() {
    // Identify this provider by the fully qualified name of the Provider class.
    String providerClassName = DotifyTranslatorImpl.Provider.class.getName();
    return MoreObjects.toStringHelper(providerClassName);
}
}
// Class logger shared by DotifyTranslatorImpl and its nested classes.
private static final Logger logger = LoggerFactory.getLogger(DotifyTranslatorImpl.class);
/**
 * Utility answering whether we are running inside an OSGi framework, without
 * requiring the OSGi classes to be on the classpath.
 */
private static abstract class OSGiHelper {
    static boolean inOSGiContext() {
        try {
            // A non-null bundle means an OSGi framework loaded this class.
            return null != FrameworkUtil.getBundle(OSGiHelper.class);
        } catch (NoClassDefFoundError e) {
            // FrameworkUtil is absent entirely: plain JVM / SPI environment.
            return false;
        }
    }
}
}
| |
/* -------------------------------------------------------------------------- *
* OpenSim: JTreeTable.java *
* -------------------------------------------------------------------------- *
* OpenSim is a toolkit for musculoskeletal modeling and simulation, *
* developed as an open source project by a worldwide community. Development *
* and support is coordinated from Stanford University, with funding from the *
* U.S. NIH and DARPA. See http://opensim.stanford.edu and the README file *
* for more information including specific grant numbers. *
* *
* Copyright (c) 2005-2017 Stanford University and the Authors *
* Author(s): Ayman Habib *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); you may *
* not use this file except in compliance with the License. You may obtain a *
* copy of the License at http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
* -------------------------------------------------------------------------- */
package org.opensim.view.editors;
/*
* @(#)JTreeTable.java 1.2 98/10/27
*
* Copyright 1997, 1998 by Sun Microsystems, Inc.,
* 901 San Antonio Road, Palo Alto, California, 94303, U.S.A.
* All rights reserved.
*
* This software is the confidential and proprietary information
* of Sun Microsystems, Inc. ("Confidential Information"). You
* shall not disclose such Confidential Information and shall use
* it only in accordance with the terms of the license agreement
* you entered into with Sun.
*/
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.event.MouseEvent;
import java.util.EventObject;
import javax.swing.JTable;
import javax.swing.JTree;
import javax.swing.ListSelectionModel;
import javax.swing.LookAndFeel;
import javax.swing.UIManager;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.TableCellEditor;
import javax.swing.table.TableCellRenderer;
import javax.swing.tree.DefaultTreeCellRenderer;
import javax.swing.tree.DefaultTreeSelectionModel;
import javax.swing.tree.TreeCellRenderer;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreePath;
/**
 * This example shows how to create a simple JTreeTable component,
 * by using a JTree as a renderer (and editor) for the cells in a
 * particular column in the JTable.
 *
 * @version 1.2 10/27/98
 *
 * @author Philip Milne
 * @author Scott Violet
 */
public class JTreeTable extends JTable {
    /** A subclass of JTree, used as both renderer and editor of the tree column. */
    protected TreeTableCellRenderer tree;
    /** Editability flag. NOTE(review): never consulted in this class — confirm callers. */
    protected boolean isEditable = false;

    /**
     * Builds a tree table around the given model: embeds a JTree as the
     * renderer/editor of the TreeTableModel column and shares a single
     * selection model between the table and the tree.
     */
    public JTreeTable(TreeTableModel treeTableModel) {
        super();
        // Commit (rather than cancel) an in-progress cell edit on focus loss.
        putClientProperty("terminateEditOnFocusLost", Boolean.TRUE);

        // Create the tree. It will be used as a renderer and editor.
        tree = new TreeTableCellRenderer(treeTableModel);

        // Install a tableModel representing the visible rows in the tree.
        super.setModel(new TreeTableModelAdapter(treeTableModel, tree));

        // Force the JTable and JTree to share their row selection models.
        ListToTreeSelectionModelWrapper selectionWrapper = new
            ListToTreeSelectionModelWrapper();
        tree.setSelectionModel(selectionWrapper);
        setSelectionModel(selectionWrapper.getListSelectionModel());

        // Install the tree cell renderer and editor.
        setDefaultRenderer(TreeTableModel.class, tree);
        setDefaultEditor(TreeTableModel.class, new TreeTableCellEditor());

        // No grid.
        setShowGrid(false);

        // No intercell spacing.
        setIntercellSpacing(new Dimension(0, 0));

        // And update the height of the tree's rows to match that of
        // the table.
        if (tree.getRowHeight() < 1) {
            // Metal looks better like this.
            setRowHeight(18);
        }
    }

    /**
     * Overridden to message super and forward the method to the tree.
     * Since the tree is not actually in the component hierarchy it will
     * never receive this unless we forward it in this manner.
     */
    public void updateUI() {
        super.updateUI();
        if(tree != null) {
            tree.updateUI();
        }
        // Use the tree's default foreground and background colors in the
        // table.
        LookAndFeel.installColorsAndFont(this, "Tree.background",
                                         "Tree.foreground", "Tree.font");
    }

    /* Workaround for BasicTableUI anomaly. Make sure the UI never tries to
     * paint the editor. The UI currently uses different techniques to
     * paint the renderers and editors and overriding setBounds() below
     * is not the right thing to do for an editor. Returning -1 for the
     * editing row in this case, ensures the editor is never painted.
     */
    public int getEditingRow() {
        return (getColumnClass(editingColumn) == TreeTableModel.class) ? -1 :
            editingRow;
    }

    /**
     * Overridden to pass the new rowHeight to the tree.
     */
    public void setRowHeight(int rowHeight) {
        super.setRowHeight(rowHeight);
        if (tree != null && tree.getRowHeight() != rowHeight) {
            tree.setRowHeight(getRowHeight());
        }
    }

    /**
     * Returns the tree that is being shared between the model.
     */
    public JTree getTree() {
        return tree;
    }

    /**
     * A TreeCellRenderer that displays a JTree.
     */
    public class TreeTableCellRenderer extends JTree implements
        TableCellRenderer {
        /** Last table/tree row asked to renderer. */
        protected int visibleRow;

        public TreeTableCellRenderer(TreeModel model) {
            super(model);
        }

        /**
         * updateUI is overridden to set the colors of the Tree's renderer
         * to match that of the table.
         */
        public void updateUI() {
            super.updateUI();
            // Make the tree's cell renderer use the table's cell selection
            // colors.
            TreeCellRenderer tcr = getCellRenderer();
            if (tcr instanceof DefaultTreeCellRenderer) {
                DefaultTreeCellRenderer dtcr = ((DefaultTreeCellRenderer)tcr);
                // dtcr.setBorderSelectionColor(null);
                dtcr.setTextSelectionColor(UIManager.getColor
                                           ("Table.selectionForeground"));
                dtcr.setBackgroundSelectionColor(UIManager.getColor
                                                 ("Table.selectionBackground"));
            }
        }

        /**
         * Sets the row height of the tree, and forwards the row height to
         * the table.
         */
        public void setRowHeight(int rowHeight) {
            if (rowHeight > 0) {
                super.setRowHeight(rowHeight);
                if (JTreeTable.this != null &&
                    JTreeTable.this.getRowHeight() != rowHeight) {
                    JTreeTable.this.setRowHeight(getRowHeight());
                }
            }
        }

        /**
         * This is overridden to set the height to match that of the JTable.
         */
        public void setBounds(int x, int y, int w, int h) {
            // y is pinned to 0 and the height to the full table; paint()
            // translates the graphics so only the target row is visible.
            super.setBounds(x, 0, w, JTreeTable.this.getHeight());
        }

        /**
         * Subclassed to translate the graphics such that the last visible
         * row will be drawn at 0,0.
         */
        public void paint(Graphics g) {
            g.translate(0, -visibleRow * getRowHeight());
            super.paint(g);
        }

        /**
         * TreeCellRenderer method. Overridden to update the visible row.
         */
        public Component getTableCellRendererComponent(JTable table,
                                                       Object value,
                                                       boolean isSelected,
                                                       boolean hasFocus,
                                                       int row, int column) {
            if(isSelected)
                setBackground(table.getSelectionBackground());
            else
                setBackground(table.getBackground());
            visibleRow = row;
            return this;
        }
    }

    /**
     * TreeTableCellEditor implementation. Component returned is the
     * JTree.
     */
    public class TreeTableCellEditor extends AbstractCellEditor implements
        TableCellEditor {
        public Component getTableCellEditorComponent(JTable table,
                                                     Object value,
                                                     boolean isSelected,
                                                     int r, int c) {
            return tree;
        }

        /**
         * Overridden to return false, and if the event is a mouse event
         * it is forwarded to the tree.<p>
         * The behavior for this is debatable, and should really be offered
         * as a property. By returning false, all keyboard actions are
         * implemented in terms of the table. By returning true, the
         * tree would get a chance to do something with the keyboard
         * events. For the most part this is ok. But for certain keys,
         * such as left/right, the tree will expand/collapse where as
         * the table focus should really move to a different column. Page
         * up/down should also be implemented in terms of the table.
         * By returning false this also has the added benefit that clicking
         * outside of the bounds of the tree node, but still in the tree
         * column will select the row, whereas if this returned true
         * that wouldn't be the case.
         * <p>By returning false we are also enforcing the policy that
         * the tree will never be editable (at least by a key sequence).
         */
        public boolean isCellEditable(EventObject e) {
            if (e instanceof MouseEvent) {
                // Re-dispatch the click to the tree column so expand/collapse
                // handles still work even though editing is refused.
                for (int counter = getColumnCount() - 1; counter >= 0;
                     counter--) {
                    if (getColumnClass(counter) == TreeTableModel.class) {
                        MouseEvent me = (MouseEvent)e;
                        MouseEvent newME = new MouseEvent(tree, me.getID(),
                                                          me.getWhen(), me.getModifiers(),
                                                          me.getX() - getCellRect(0, counter, true).x,
                                                          me.getY(), me.getClickCount(),
                                                          me.isPopupTrigger());
                        tree.dispatchEvent(newME);
                        break;
                    }
                }
            }
            return false;
        }
    }

    /**
     * ListToTreeSelectionModelWrapper extends DefaultTreeSelectionModel
     * to listen for changes in the ListSelectionModel it maintains. Once
     * a change in the ListSelectionModel happens, the paths are updated
     * in the DefaultTreeSelectionModel.
     */
    class ListToTreeSelectionModelWrapper extends DefaultTreeSelectionModel {
        /** Set to true when we are updating the ListSelectionModel. */
        protected boolean updatingListSelectionModel;

        public ListToTreeSelectionModelWrapper() {
            super();
            getListSelectionModel().addListSelectionListener
                (createListSelectionListener());
        }

        /**
         * Returns the list selection model. ListToTreeSelectionModelWrapper
         * listens for changes to this model and updates the selected paths
         * accordingly.
         */
        ListSelectionModel getListSelectionModel() {
            return listSelectionModel;
        }

        /**
         * This is overridden to set <code>updatingListSelectionModel</code>
         * and message super. This is the only place DefaultTreeSelectionModel
         * alters the ListSelectionModel.
         */
        public void resetRowSelection() {
            if(!updatingListSelectionModel) {
                updatingListSelectionModel = true;
                try {
                    super.resetRowSelection();
                }
                finally {
                    updatingListSelectionModel = false;
                }
            }
            // Notice how we don't message super if
            // updatingListSelectionModel is true. If
            // updatingListSelectionModel is true, it implies the
            // ListSelectionModel has already been updated and the
            // paths are the only thing that needs to be updated.
        }

        /**
         * Creates and returns an instance of ListSelectionHandler.
         */
        protected ListSelectionListener createListSelectionListener() {
            return new ListSelectionHandler();
        }

        /**
         * If <code>updatingListSelectionModel</code> is false, this will
         * reset the selected paths from the selected rows in the list
         * selection model.
         */
        protected void updateSelectedPathsFromSelectedRows() {
            if(!updatingListSelectionModel) {
                updatingListSelectionModel = true;
                try {
                    // This is way expensive, ListSelectionModel needs an
                    // enumerator for iterating.
                    int min = listSelectionModel.getMinSelectionIndex();
                    int max = listSelectionModel.getMaxSelectionIndex();
                    clearSelection();
                    if(min != -1 && max != -1) {
                        for(int counter = min; counter <= max; counter++) {
                            if(listSelectionModel.isSelectedIndex(counter)) {
                                TreePath selPath = tree.getPathForRow
                                    (counter);
                                if(selPath != null) {
                                    addSelectionPath(selPath);
                                }
                            }
                        }
                    }
                }
                finally {
                    updatingListSelectionModel = false;
                }
            }
        }

        /**
         * Class responsible for calling updateSelectedPathsFromSelectedRows
         * when the selection of the list changes.
         */
        class ListSelectionHandler implements ListSelectionListener {
            public void valueChanged(ListSelectionEvent e) {
                updateSelectedPathsFromSelectedRows();
            }
        }
    }
}
| |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.reports.exam;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.TreeSet;
import java.util.Vector;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.unitime.timetable.model.ExamPeriod;
import org.unitime.timetable.model.ExamType;
import org.unitime.timetable.model.Session;
import org.unitime.timetable.model.SubjectArea;
import org.unitime.timetable.solver.exam.ui.ExamAssignmentInfo;
import org.unitime.timetable.solver.exam.ui.ExamRoomInfo;
import org.unitime.timetable.solver.exam.ui.ExamInfo.ExamSectionInfo;
import com.lowagie.text.DocumentException;
/**
 * Exam period chart report: for each examination period, prints the exams
 * (with enrollments) taking place in it, laid out as a day-by-time matrix.
 * The layout is controlled by the {@code iCompact} and {@code iTotals} flags
 * inherited from {@link PdfLegacyExamReport}.
 *
 * @author Tomas Muller
 */
public class ExamPeriodChartReport extends PdfLegacyExamReport {
    // Fixed: previously logged under ScheduleByCourseReport.class (copy-paste slip).
    protected static Log sLog = LogFactory.getLog(ExamPeriodChartReport.class);

    /**
     * Creates the report for the given session, exam type, subject areas and
     * assignments; delegates all setup to the parent report.
     */
    public ExamPeriodChartReport(int mode, File file, Session session, ExamType examType, Collection<SubjectArea> subjectAreas, Collection<ExamAssignmentInfo> exams) throws IOException, DocumentException {
        super(mode, file, "PERIOD ASSIGNMENT", session, examType, subjectAreas, exams);
    }

    /**
     * Prints the period chart. Exams are grouped by period, then emitted
     * time-row by time-row across up to 4-6 day columns per page
     * (compact: 5, compact with totals: 6, otherwise 4); with
     * {@code iTotals} a per-day totals footer is appended.
     *
     * @throws DocumentException on PDF output errors
     */
    public void printReport() throws DocumentException {
        // Footer carries the report code and/or the enrollment limit, when set.
        if (iRC!=null && iRC.length()>0)
            setFooter(iRC+(iLimit>=0?" (limit="+iLimit+")":""));
        else if (iLimit>=0)
            setFooter("limit="+iLimit);
        // Group the assignments of interest by their examination period.
        Hashtable<ExamPeriod,TreeSet<ExamAssignmentInfo>> period2exams = new Hashtable<ExamPeriod,TreeSet<ExamAssignmentInfo>>();
        for (ExamAssignmentInfo exam : getExams()) {
            if (exam.getPeriod()==null || !hasSubjectArea(exam)) continue;
            TreeSet<ExamAssignmentInfo> exams = period2exams.get(exam.getPeriod());
            if (exams==null) {
                exams = new TreeSet<ExamAssignmentInfo>();
                period2exams.put(exam.getPeriod(),exams);
            }
            exams.add(exam);
        }
        // Distinct start times (slot -> label), zero-padded time cells, and days (offset -> label).
        HashMap<Integer,String> times = new HashMap<Integer, String>();
        HashMap<Integer,Cell> fixedTimes = new HashMap<Integer, Cell>();
        HashMap<Integer,String> days = new HashMap<Integer, String>();
        for (Iterator i=ExamPeriod.findAll(getSession().getUniqueId(), getExamType()).iterator();i.hasNext();) {
            ExamPeriod period = (ExamPeriod)i.next();
            times.put(period.getStartSlot(), period.getStartTimeLabel());
            days.put(period.getDateOffset(), period.getStartDateLabel());
            fixedTimes.put(period.getStartSlot(), lpad(period.getStartTimeLabel(),'0',6));
        }
        boolean headerPrinted = false;
        // Running per-day enrollment totals (date offset -> total) for the totals footer.
        Hashtable<Integer,Integer> totalADay = new Hashtable<Integer,Integer>();
        String timesThisPage = null;
        int nrCols = 0;
        if (!iTotals) {
            if (iCompact) {
                setHeaderLine(new Line(
                        rpad("Start Time", 10).withSeparator("| "),
                        rpad("Exam", 15).withSeparator(""), rpad(" ", 1).withSeparator(""), rpad("Enrl", 4).withSeparator("| "),
                        rpad("Exam", 15).withSeparator(""), rpad(" ", 1).withSeparator(""), rpad("Enrl", 4).withSeparator("| "),
                        rpad("Exam", 15).withSeparator(""), rpad(" ", 1).withSeparator(""), rpad("Enrl", 4).withSeparator("| "),
                        rpad("Exam", 15).withSeparator(""), rpad(" ", 1).withSeparator(""), rpad("Enrl", 4).withSeparator("| "),
                        rpad("Exam", 15).withSeparator(""), rpad(" ", 1).withSeparator(""), rpad("Enrl", 4)),
                    new Line(rpad("", '-', 10).withSeparator("| "),
                        rpad("", '-', 15).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                        rpad("", '-', 15).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                        rpad("", '-', 15).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                        rpad("", '-', 15).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                        rpad("", '-', 15).withColSpan(2), rpad("", '-', 4)));
            } else {
                setHeaderLine(new Line(
                        rpad("Start Time", 10).withSeparator("|"),
                        rpad("Exam", 24).withSeparator(""), rpad(" ", 1).withSeparator(""), rpad("Enrl", 4).withSeparator("| "),
                        rpad("Exam", 24).withSeparator(""), rpad(" ", 1).withSeparator(""), rpad("Enrl", 4).withSeparator("| "),
                        rpad("Exam", 24).withSeparator(""), rpad(" ", 1).withSeparator(""), rpad("Enrl", 4).withSeparator("| "),
                        rpad("Exam", 24).withSeparator(""), rpad(" ", 1).withSeparator(""), rpad("Enrl", 4)),
                    new Line(rpad("", '-', 10).withSeparator("|"),
                        rpad("", '-', 24).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                        rpad("", '-', 24).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                        rpad("", '-', 24).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                        rpad("", '-', 24).withColSpan(2), rpad("", '-', 4)));
            }
            printHeader();
        }
        int lastDIdx = 0;
        boolean firstLine = true;
        // Walk day columns in pages of nrCols; for each page, emit one section per start time.
        for (int dIdx = 0; dIdx < days.size(); dIdx+=nrCols) {
            for (int time: new TreeSet<Integer>(times.keySet())) {
                int offset = 0;
                String timeStr = times.get(time);
                List<Cell> header1 = new ArrayList<Cell>();
                List<Cell> header2 = new ArrayList<Cell>();
                List<Cell> header3 = new ArrayList<Cell>();
                Vector periods = new Vector();
                int idx = 0;
                String firstDay = null; int firstDayOffset = 0;
                String lastDay = null;
                nrCols = 0;
                // Select up to (iCompact?iTotals?6:5:4) day columns starting at dIdx,
                // never crossing into a new week.
                for (Iterator<Integer> f = new TreeSet<Integer>(days.keySet()).iterator(); f.hasNext(); idx++) {
                    int day = f.next();
                    String dayStr = days.get(day);
                    if (idx<dIdx || nrCols==(iCompact?iTotals?6:5:4)) continue;
                    if (firstDay!=null && (dayStr.startsWith("Mon") || day>=firstDayOffset+7)) break;
                    if (firstDay==null) {
                        firstDay = dayStr; firstDayOffset = day;
                        Calendar c = Calendar.getInstance(Locale.US);
                        c.setTime(getSession().getExamBeginDate());
                        c.add(Calendar.DAY_OF_YEAR, day);
                        if (!iTotals) {
                            // Align the first column with the day of week (Mon = 0).
                            offset = (c.get(Calendar.DAY_OF_WEEK)+5)%7;
                            firstDayOffset -= offset;
                        }
                    }
                    lastDay = dayStr;
                    if (iCompact) {
                        header1.add(mpad(dayStr,20).withSeparator("| ").withColSpan(3));
                        header2.add(rpad("Exam", 15).withSeparator(""));
                        header2.add(rpad(" ", 1).withSeparator(""));
                        header2.add(rpad("Enrl", 4).withSeparator("| "));
                        header3.add(lpad("", '=', 15).withColSpan(2)); header3.add(lpad("", '=', 4).withSeparator("| "));
                    } else {
                        header1.add(mpad(dayStr,29).withSeparator("| ").withColSpan(3));
                        header2.add(rpad("Exam", 24).withSeparator(""));
                        header2.add(rpad(" ", 1).withSeparator(""));
                        header2.add(rpad("Enrl", 4).withSeparator("| "));
                        header3.add(lpad("", '=', 24).withColSpan(2)); header3.add(lpad("", '=', 4).withSeparator("| "));
                    }
                    // Locate the period for this (time, day) combination, if any.
                    ExamPeriod period = null;
                    nrCols++;
                    for (Iterator i=ExamPeriod.findAll(getSession().getUniqueId(), getExamType()).iterator();i.hasNext();) {
                        ExamPeriod p = (ExamPeriod)i.next();
                        if (time!=p.getStartSlot() || day!=p.getDateOffset()) continue;
                        period = p; break;
                    }
                    periods.add(period);
                }
                if (iTotals)
                    setHeaderLine(
                        new Line(new Cell(timeStr).withColSpan(header2.size())),
                        new Line(header1.toArray(new Cell[header1.size()])),
                        new Line(header2.toArray(new Cell[header2.size()])),
                        new Line(header3.toArray(new Cell[header3.size()])));
                else if (offset + periods.size() > (iCompact?iTotals?6:5:4))
                    offset = Math.max(0, (iCompact?iTotals?6:5:4) - periods.size());
                // Estimate how many lines this time-row will need (for page-break decisions).
                int nextLines = 0;
                for (Enumeration f=periods.elements();f.hasMoreElements();) {
                    ExamPeriod period = (ExamPeriod)f.nextElement();
                    if (period==null) continue;
                    TreeSet<ExamAssignmentInfo> exams = period2exams.get(period);
                    if (exams==null) continue;
                    int linesThisSections = 6;
                    for (ExamAssignmentInfo exam : exams) {
                        int size = 0;
                        for (ExamSectionInfo section: exam.getSectionsIncludeCrosslistedDummies()) size+= section.getNrStudents();
                        if (iLimit<0 || size>=iLimit) {
                            for (ExamSectionInfo section: exam.getSectionsIncludeCrosslistedDummies())
                                if (hasSubjectArea(section)) linesThisSections++;
                        }
                    }
                    nextLines = Math.max(nextLines,linesThisSections);
                }
                if (iTotals) {
                    if (!headerPrinted) {
                        printHeader();
                        setPageName(timeStr+(days.size()>nrCols?" ("+firstDay+" - "+lastDay+")":""));
                        setCont(timeStr+(days.size()>nrCols?" ("+firstDay+" - "+lastDay+")":""));
                        timesThisPage = timeStr;
                    } else if (timesThisPage!=null && (getNrLinesPerPage() == 0 || getLineNumber()+nextLines<=getNrLinesPerPage())) {
                        println(new Line());
                        printHeader(false);
                        timesThisPage += ", "+timeStr;
                        setPageName(timesThisPage+(days.size()>nrCols?" ("+firstDay+" - "+lastDay+")":""));
                        setCont(timesThisPage+(days.size()>nrCols?" ("+firstDay+" - "+lastDay+")":""));
                    } else {
                        newPage();
                        timesThisPage = timeStr;
                        setPageName(timeStr+(days.size()>nrCols?" ("+firstDay+" - "+lastDay+")":""));
                        setCont(timeStr+(days.size()>nrCols?" ("+firstDay+" - "+lastDay+")":""));
                    }
                } else {
                    if (nextLines==0) continue;
                    if (!iNewPage && !firstLine) {
                        // Separator between blocks: full when the day page changed, blank-lead otherwise.
                        if (lastDIdx!=dIdx) {
                            if (iCompact)
                                printSeparator(rpad("", '-', 10).withSeparator("| "),
                                    rpad("", '-', 15).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                                    rpad("", '-', 15).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                                    rpad("", '-', 15).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                                    rpad("", '-', 15).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                                    rpad("", '-', 15).withColSpan(2), rpad("", '-', 4));
                            else
                                printSeparator(rpad("", '-', 10),
                                    rpad("", '-', 24).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                                    rpad("", '-', 24).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                                    rpad("", '-', 24).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                                    rpad("", '-', 24).withColSpan(2), rpad("", '-', 4));
                            lastDIdx = dIdx;
                        } else {
                            if (iCompact)
                                printSeparator(lpad("", ' ', 10).withSeparator("| "),
                                    rpad("", '-', 15).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                                    rpad("", '-', 15).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                                    rpad("", '-', 15).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                                    rpad("", '-', 15).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                                    rpad("", '-', 15).withColSpan(2), rpad("", '-', 4));
                            else
                                printSeparator(lpad("", ' ', 10),
                                    rpad("", '-', 24).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                                    rpad("", '-', 24).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                                    rpad("", '-', 24).withColSpan(2), rpad("", '-', 4).withSeparator("| "),
                                    rpad("", '-', 24).withColSpan(2), rpad("", '-', 4));
                        }
                    }
                    firstLine = false;
                    setCont(firstDay+" - "+lastDay+" "+fixedTimes.get(time));
                    setPageName(firstDay+" - "+lastDay+" "+fixedTimes.get(time));
                }
                headerPrinted = true;
                // Build the per-period cell lines, then print them row by row.
                int max = 0;
                Vector lines = new Vector();
                for (Enumeration f=periods.elements();f.hasMoreElements();) {
                    ExamPeriod period = (ExamPeriod)f.nextElement();
                    if (period==null) {
                        Vector linesThisPeriod = new Vector();
                        linesThisPeriod.add(new Cell[] {lpad("", iCompact ? 15 : 24).withColSpan(2), lpad("0",5)});
                        lines.add(linesThisPeriod);
                        continue;
                    }
                    TreeSet<ExamAssignmentInfo> exams = period2exams.get(period);
                    if (exams==null) exams = new TreeSet<ExamAssignmentInfo>();
                    Vector<Cell[]> linesThisPeriod = new Vector<Cell[]>();
                    int total = 0;
                    int totalListed = 0;
                    for (ExamAssignmentInfo exam : exams) {
                        boolean sizePrinted = false;
                        int size = 0;
                        for (ExamSectionInfo section: exam.getSectionsIncludeCrosslistedDummies()) size+= section.getNrStudents();
                        for (ExamSectionInfo section : exam.getSectionsIncludeCrosslistedDummies()) {
                            if (!hasSubjectArea(section)) continue;
                            total += section.getNrStudents();
                            if (iLimit>=0 && size<iLimit) continue;
                            totalListed += section.getNrStudents();
                            String code = null;
                            if (iRoomCodes!=null && !iRoomCodes.isEmpty()) {
                                // Take the first room that has a code assigned.
                                // Fixed: the break used to be unconditional, so only the
                                // first room was ever considered.
                                for (ExamRoomInfo room : section.getExamAssignment().getRooms()) {
                                    String c = iRoomCodes.get(room.getName());
                                    if (c!=null) { code = c; break; }
                                }
                            }
                            if (iCompact) {
                                linesThisPeriod.add(
                                    new Cell[] {
                                        new Cell(
                                            rpad(section.getSubject(),7).withSeparator(""),
                                            rpad(section.getCourseNbr(),8).withSeparator("")).withSeparator(""),
                                        new Cell(String.valueOf(sizePrinted||code==null||code.length()==0?' ':code.charAt(0))).withSeparator(""),
                                        lpad(sizePrinted?"":String.valueOf(size),4)
                                    });
                            } else {
                                if (iItype) {
                                    if (iExternal) {
                                        linesThisPeriod.add(
                                            new Cell[] {
                                                new Cell(
                                                    rpad(section.getSubject(),7).withSeparator(""),
                                                    rpad(section.getCourseNbr(),8).withSeparator(""),
                                                    rpad(section.getItype(),9).withSeparator("")).withSeparator(""),
                                                new Cell(String.valueOf(sizePrinted||code==null||code.length()==0?' ':code.charAt(0))).withSeparator(""),
                                                lpad(sizePrinted?"":String.valueOf(size),4)
                                            });
                                    } else {
                                        linesThisPeriod.add(
                                            new Cell[] {
                                                rpad(section.getName(),24).withSeparator(""),
                                                new Cell(String.valueOf(sizePrinted||code==null||code.length()==0?' ':code.charAt(0))).withSeparator(""),
                                                lpad(sizePrinted?"":String.valueOf(size),4)
                                            });
                                    }
                                } else {
                                    linesThisPeriod.add(
                                        new Cell[] {
                                            new Cell(
                                                rpad(section.getSubject(),7).withSeparator(""),
                                                rpad(section.getCourseNbr(),8).withSeparator(""),
                                                rpad(section.getItype(),9).withSeparator("")).withSeparator(""),
                                            new Cell(String.valueOf(sizePrinted||code==null||code.length()==0?' ':code.charAt(0))).withSeparator(""),
                                            lpad(sizePrinted?"":String.valueOf(size),4)
                                        });
                                }
                            }
                            sizePrinted = true;
                        }
                    }
                    // First line of a period column: total (with totals) or the date label.
                    if (iCompact) {
                        if (iTotals) {
                            if (totalListed!=total)
                                linesThisPeriod.insertElementAt(new Cell[] {mpad("("+totalListed+")",14).withColSpan(2), lpad(""+total,6)}, 0);
                            else
                                linesThisPeriod.insertElementAt(new Cell[] {lpad(""+total,20).withColSpan(3)}, 0);
                        } else {
                            linesThisPeriod.insertElementAt(new Cell[] {rpad(period.getStartDateLabel(),14).withColSpan(2), lpad(total==0?"":(""+total),6)}, 0);
                        }
                    } else {
                        if (iTotals) {
                            if (totalListed!=total)
                                linesThisPeriod.insertElementAt(new Cell[] {mpad("("+totalListed+")",23).withColSpan(2), lpad(""+total,6)}, 0);
                            else
                                linesThisPeriod.insertElementAt(new Cell[] {lpad(""+total,29).withColSpan(3)}, 0);
                        } else {
                            linesThisPeriod.insertElementAt(new Cell[] {rpad(period.getStartDateLabel(),23).withColSpan(2), lpad(total==0?"":(""+total),6)}, 0);
                        }
                    }
                    max = Math.max(max, linesThisPeriod.size());
                    Integer td = totalADay.get(period.getDateOffset());
                    totalADay.put(period.getDateOffset(),Integer.valueOf(total+(td==null?0:td.intValue())));
                    lines.add(linesThisPeriod);
                }
                for (int i=0;i<max;i++) {
                    List<Cell> line = new ArrayList<Cell>();
                    if (!iTotals) {
                        if (iCompact) {
                            if (i==0 || iNewPage) {
                                line.add(rpad(fixedTimes.get(time),10).withSeparator("| "));
                                for (int c = 0; c < offset; c++)
                                    line.add(rpad("",20).withSeparator("| ").withColSpan(3));
                            } else {
                                line.add(rpad("",10).withSeparator("| "));
                                for (int c = 0; c < offset; c++)
                                    line.add(rpad("",20).withSeparator("| ").withColSpan(3));
                            }
                        } else {
                            if (i==0 || iNewPage) {
                                line.add(rpad(fixedTimes.get(time),10).withSeparator("|"));
                                for (int c = 0; c < offset; c++)
                                    line.add(rpad("",29).withSeparator("| ").withColSpan(3));
                            } else {
                                line.add(rpad("",10).withSeparator("|"));
                                for (int c = 0; c < offset; c++)
                                    line.add(rpad("",29).withSeparator("| ").withColSpan(3));
                            }
                        }
                    }
                    for (Enumeration f=lines.elements();f.hasMoreElements();) {
                        Vector linesThisPeriod = (Vector)f.nextElement();
                        if (i < linesThisPeriod.size()) {
                            Cell[] c = (Cell[])linesThisPeriod.elementAt(i);
                            for (int j = 0; j < c.length; j++)
                                line.add(c[j].withSeparator(j + 1 == c.length ? "| " : ""));
                        } else {
                            line.add(rpad("",iCompact ? 20 : 29).withColSpan(3).withSeparator("| "));
                        }
                    }
                    if (!iTotals)
                        for (int c = offset + lines.size(); c < (iCompact ? 5 : 4); c++) {
                            if (iCompact) {
                                line.add(rpad("",20).withSeparator("| ").withColSpan(3));
                            } else {
                                line.add(rpad("",29).withSeparator("|").withColSpan(3));
                            }
                        }
                    println(line.toArray(new Cell[line.size()]));
                }
                setCont(null);
            }
            // With totals enabled, append a per-day totals footer for this day page.
            if (iTotals) {
                setHeaderLine();
                if (getLineNumber()+5>getNrLinesPerPage() && getNrLinesPerPage() > 0) {
                    newPage();
                    setPageName("Totals");
                } else
                    println(new Line());
                List<Cell> line1 = new ArrayList<Cell>();
                List<Cell> line2 = new ArrayList<Cell>();
                List<Cell> line3 = new ArrayList<Cell>();
                int idx = 0;
                for (Iterator<Integer> f = new TreeSet<Integer>(days.keySet()).iterator(); f.hasNext(); idx++) {
                    int day = f.next();
                    if (idx<dIdx || idx>=dIdx+nrCols) continue;
                    if (iCompact) {
                        line1.add(mpad((String)days.get(day),20).withSeparator("| ").withColSpan(3));
                        line2.add(lpad("", '=', 15).withSeparator("")); line2.add(lpad("", ' ', 1).withSeparator("")); line2.add(lpad("", '=', 4).withSeparator("| "));
                        line3.add(lpad(totalADay.get(day)==null?"":totalADay.get(day).toString(),20).withColSpan(3).withSeparator("| "));
                    } else {
                        line1.add(mpad((String)days.get(day),29).withSeparator("| ").withColSpan(3));
                        line2.add(lpad("", '=', 24).withSeparator("")); line2.add(lpad("", ' ', 1).withSeparator("")); line2.add(lpad("", '=', 4).withSeparator("| "));
                        line3.add(lpad(totalADay.get(day)==null?"":totalADay.get(day).toString(),29).withColSpan(3).withSeparator("| "));
                    }
                }
                setHeaderLine(
                    new Line(new Cell("Total Student Exams").withColSpan(line1.size() * 3)),
                    new Line(line1.toArray(new Cell[line1.size()])),
                    new Line(line2.toArray(new Cell[line2.size()])));
                printHeader(false);
                println(line3.toArray(new Cell[line3.size()]));
                timesThisPage = null;
            }
        }
        lastPage();
    }
}
| |
/*
Copyright 2011 Selenium committers
Copyright 2011 Software Freedom Conservancy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.grid.internal;
import com.google.common.base.Charsets;
import com.google.common.io.ByteStreams;
import com.google.common.net.MediaType;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.HttpRequest;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.message.BasicHttpEntityEnclosingRequest;
import org.apache.http.message.BasicHttpRequest;
import org.apache.http.util.EntityUtils;
import org.openqa.grid.common.exception.ClientGoneException;
import org.openqa.grid.common.exception.GridException;
import org.openqa.grid.internal.listeners.CommandListener;
import org.openqa.grid.web.Hub;
import org.openqa.grid.web.servlet.handler.LegacySeleniumRequest;
import org.openqa.grid.web.servlet.handler.RequestType;
import org.openqa.grid.web.servlet.handler.SeleniumBasedRequest;
import org.openqa.grid.web.servlet.handler.SeleniumBasedResponse;
import org.openqa.grid.web.servlet.handler.WebDriverRequest;
import org.openqa.selenium.io.IOUtils;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.DateFormat;
import java.util.Calendar;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.logging.Logger;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Represents a running test for the hub/registry. A test session is created when a TestSlot
 * becomes available for a test. <p/> The session is destroyed when the test ends (either ended
 * by the client or timed out).
 */
@SuppressWarnings("JavaDoc")
public class TestSession {
// Class logger.
private static final Logger log = Logger.getLogger(TestSession.class.getName());
// Idle time (ms) after creation before a selenium-protocol session is
// considered orphaned (consulted by isOrphaned()).
static final int MAX_IDLE_TIME_BEFORE_CONSIDERED_ORPHANED = 5000;
// Hub-side identifier for this session (random UUID, see constructor).
private final String internalKey;
// The slot (node + capabilities) this session runs on.
private final TestSlot slot;
// Session key assigned by the remote node; null until setExternalKey is called.
private volatile ExternalSessionKey externalKey = null;
// Timestamp (ms) recorded when the external key arrived (see setExternalKey).
private volatile long sessionCreatedAt;
// Timestamp (ms) of the most recent activity on this session.
private volatile long lastActivity;
// Capabilities the client asked for (matches, but need not equal, the slot's).
private final Map<String, Object> requestedCapabilities;
// Arbitrary per-session attachments; thread-safe map.
private Map<String, Object> objects = Collections.synchronizedMap(new HashMap<String, Object>());
// When true, getInactivityTime() reports 0 so the session never times out.
private volatile boolean ignoreTimeout = false;
// Clock abstraction; injected so timing behavior is testable.
private final TimeSource timeSource;
// NOTE(review): set/read sites are outside this chunk; name suggests it tracks
// an in-flight forward of a client request to the node.
private volatile boolean forwardingRequest;
// NOTE(review): usage not visible in this chunk; name suggests an extra
// allowance (ms) for network latency in timeout calculations.
private final int MAX_NETWORK_LATENCY = 1000;
public String getInternalKey() {
return internalKey;
}
/**
* Creates a test session on the specified testSlot.
*/
public TestSession(TestSlot slot, Map<String, Object> requestedCapabilities,
TimeSource timeSource) {
internalKey = UUID.randomUUID().toString();
this.slot = slot;
this.requestedCapabilities = requestedCapabilities;
this.timeSource = timeSource;
lastActivity = this.timeSource.currentTimeInMillis();
}
/**
* the capabilities the client requested. It will match the TestSlot capabilities, but is not
* equals.
*/
public Map<String, Object> getRequestedCapabilities() {
return requestedCapabilities;
}
/**
* Get the session key from the remote. It's up to the remote to guarantee the key is unique. If 2
* remotes return the same session key, the tests will overwrite each other.
*
* @return the key that was provided by the remote when the POST /session command was sent.
*/
public ExternalSessionKey getExternalKey() {
return externalKey;
}
/**
* associate this session to the session provided by the remote.
*/
public void setExternalKey(ExternalSessionKey externalKey) {
this.externalKey = externalKey;
sessionCreatedAt = lastActivity;
}
/**
* give the time in milliseconds since the last access to this test session, or 0 is ignore time
* out has been set to true.
*
* @return time in millis
* @see TestSession#setIgnoreTimeout(boolean)
*/
public long getInactivityTime() {
if (ignoreTimeout) {
return 0;
} else {
return timeSource.currentTimeInMillis() - lastActivity;
}
}
public boolean isOrphaned() {
final long elapsedSinceCreation = timeSource.currentTimeInMillis() - sessionCreatedAt;
// The session needs to have been open for at least the time interval and we need to have not
// seen any new commands during that time frame.
return slot.getProtocol().isSelenium()
&& elapsedSinceCreation > MAX_IDLE_TIME_BEFORE_CONSIDERED_ORPHANED
&& sessionCreatedAt == lastActivity;
}
/**
* @return the TestSlot this session is executed against.
*/
public TestSlot getSlot() {
return slot;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((internalKey == null) ? 0 : internalKey.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
TestSession other = (TestSession) obj;
return internalKey.equals(other.internalKey);
}
@Override
public String toString() {
return externalKey != null ? "ext. key " + externalKey : internalKey
+ " (int. key, remote not contacted yet.)";
}
private HttpClient getClient() {
Registry reg = slot.getProxy().getRegistry();
int browserTimeout = reg.getConfiguration().getBrowserTimeout();
if (browserTimeout > 0){
final int selenium_server_cleanup_cycle = browserTimeout / 10;
browserTimeout += (selenium_server_cleanup_cycle + MAX_NETWORK_LATENCY);
browserTimeout *=2; // Lets not let this happen too often
}
return slot.getProxy().getHttpClientFactory().getGridHttpClient(browserTimeout, browserTimeout);
}
/**
* forwards the request to the node.
*/
public String forward(SeleniumBasedRequest request, HttpServletResponse response,
boolean newSessionRequest)
throws IOException {
String res = null;
String currentThreadName = Thread.currentThread().getName();
setThreadDisplayName();
forwardingRequest = true;
try {
if (slot.getProxy() instanceof CommandListener) {
((CommandListener) slot.getProxy()).beforeCommand(this, request, response);
}
lastActivity = timeSource.currentTimeInMillis();
HttpRequest proxyRequest = prepareProxyRequest(request/*, config*/);
HttpResponse proxyResponse = sendRequestToNode(proxyRequest);
lastActivity = timeSource.currentTimeInMillis();
HttpEntity responseBody = proxyResponse.getEntity();
try {
final int statusCode = proxyResponse.getStatusLine().getStatusCode();
response.setStatus(statusCode);
processResponseHeaders(request, response, slot.getRemoteURL(), proxyResponse);
byte[] consumedNewWebDriverSessionBody = null;
if (statusCode != HttpServletResponse.SC_INTERNAL_SERVER_ERROR &&
statusCode != HttpServletResponse.SC_NOT_FOUND) {
consumedNewWebDriverSessionBody = updateHubIfNewWebDriverSession(request, proxyResponse);
}
if (newSessionRequest && statusCode == HttpServletResponse.SC_INTERNAL_SERVER_ERROR) {
removeIncompleteNewSessionRequest();
}
if (statusCode == HttpServletResponse.SC_NOT_FOUND) {
removeSessionBrowserTimeout();
}
byte[] contentBeingForwarded = null;
if (responseBody != null) {
try {
InputStream in;
if (consumedNewWebDriverSessionBody == null) {
in = responseBody.getContent();
if (request.getRequestType() == RequestType.START_SESSION
&& request instanceof LegacySeleniumRequest) {
res = getResponseUtf8Content(in);
updateHubNewSeleniumSession(res);
in = new ByteArrayInputStream(res.getBytes("UTF-8"));
}
} else {
in = new ByteArrayInputStream(consumedNewWebDriverSessionBody);
}
final byte[] bytes = drainInputStream(in);
writeRawBody(response, bytes);
contentBeingForwarded = bytes;
} finally {
EntityUtils.consume(responseBody);
}
}
if (slot.getProxy() instanceof CommandListener) {
SeleniumBasedResponse wrappedResponse = new SeleniumBasedResponse(response);
wrappedResponse.setForwardedContent(contentBeingForwarded);
((CommandListener) slot.getProxy()).afterCommand(this, request, wrappedResponse);
}
response.flushBuffer();
} finally {
EntityUtils.consume(responseBody);
}
response.flushBuffer();
return res;
} finally {
forwardingRequest = false;
Thread.currentThread().setName(currentThreadName);
}
}
private void setThreadDisplayName() {
DateFormat dfmt = DateFormat.getTimeInstance();
String name = "Forwarding " + this + " to " + slot.getRemoteURL() + " at " +
dfmt.format(Calendar.getInstance().getTime());
Thread.currentThread().setName(name);
}
private void removeIncompleteNewSessionRequest() {
RemoteProxy proxy = slot.getProxy();
proxy.getRegistry().terminate(this, SessionTerminationReason.CREATIONFAILED);
}
private void removeSessionBrowserTimeout() {
RemoteProxy proxy = slot.getProxy();
proxy.getRegistry().terminate(this, SessionTerminationReason.BROWSER_TIMEOUT);
}
private void updateHubNewSeleniumSession(String content) {
ExternalSessionKey key = ExternalSessionKey.fromResponseBody(content);
setExternalKey(key);
}
private byte[] updateHubIfNewWebDriverSession(
SeleniumBasedRequest request, HttpResponse proxyResponse) throws IOException {
byte[] consumedData = null;
if (request.getRequestType() == RequestType.START_SESSION
&& request instanceof WebDriverRequest) {
Header h = proxyResponse.getFirstHeader("Location");
if (h == null) {
if (isSuccessJsonResponse(proxyResponse) && proxyResponse.getEntity() != null) {
InputStream stream = proxyResponse.getEntity().getContent();
consumedData = ByteStreams.toByteArray(stream);
stream.close();
String contentString = new String(consumedData, Charsets.UTF_8);
ExternalSessionKey key = ExternalSessionKey.fromJsonResponseBody(contentString);
if (key == null) {
throw new GridException(
"webdriver new session JSON response body did not contain a session ID");
}
setExternalKey(key);
return consumedData;
} else {
throw new GridException(
"new session request for webdriver should contain a location header "
+ "or an 'application/json;charset=UTF-8' response body with the session ID.");
}
}
ExternalSessionKey key = ExternalSessionKey.fromWebDriverRequest(h.getValue());
setExternalKey(key);
}
return consumedData;
}
private static boolean isSuccessJsonResponse(HttpResponse response) {
if (response.getStatusLine().getStatusCode() == HttpServletResponse.SC_OK) {
for (Header header : response.getHeaders("Content-Type")) {
MediaType type;
try {
type = MediaType.parse(header.getValue());
} catch (IllegalArgumentException ignored) {
continue;
}
if (MediaType.JSON_UTF_8.is(type)) {
return true;
}
}
}
return false;
}
private HttpResponse sendRequestToNode(HttpRequest proxyRequest) throws ClientProtocolException,
IOException {
HttpClient client = getClient();
URL remoteURL = slot.getRemoteURL();
HttpHost host = new HttpHost(remoteURL.getHost(), remoteURL.getPort());
return client.execute(host, proxyRequest);
}
private HttpRequest prepareProxyRequest(HttpServletRequest request
/*, ForwardConfiguration config*/)
throws IOException {
URL remoteURL = slot.getRemoteURL();
String pathSpec = request.getServletPath() + request.getContextPath();
String path = request.getRequestURI();
if (!path.startsWith(pathSpec)) {
throw new IllegalStateException("Expected path " + path + " to start with pathSpec "
+ pathSpec);
}
String end = path.substring(pathSpec.length());
String ok = remoteURL + end;
if (request.getQueryString() != null) {
ok += "?" + request.getQueryString();
}
String uri = new URL(remoteURL, ok).toExternalForm();
InputStream body = null;
if (request.getContentLength() > 0 || request.getHeader("Transfer-Encoding") != null) {
body = request.getInputStream();
}
HttpRequest proxyRequest;
if (body != null) {
BasicHttpEntityEnclosingRequest r =
new BasicHttpEntityEnclosingRequest(request.getMethod(), uri);
r.setEntity(new InputStreamEntity(body, request.getContentLength()));
proxyRequest = r;
} else {
proxyRequest = new BasicHttpRequest(request.getMethod(), uri);
}
for (Enumeration<?> e = request.getHeaderNames(); e.hasMoreElements(); ) {
String headerName = (String) e.nextElement();
if ("Content-Length".equalsIgnoreCase(headerName)) {
continue; // already set
}
proxyRequest.setHeader(headerName, request.getHeader(headerName));
}
return proxyRequest;
}
private void writeRawBody(HttpServletResponse response, byte[] rawBody) throws IOException {
OutputStream out = response.getOutputStream();
try {
// We need to set the Content-Length header before we write to the output stream. Usually
// the
// Content-Length header is already set because we take it from the proxied request. But, it
// won't
// be set when we consume chunked content, since that doesn't use Content-Length. As we're
// not
// going to send a chunked response, we need to set the Content-Length in order for the
// response
// to be valid.
if (!response.containsHeader("Content-Length")) {
response.setIntHeader("Content-Length", rawBody.length);
}
out.write(rawBody);
} catch (IOException e) {
throw new ClientGoneException(e);
} finally {
IOUtils.closeQuietly(out);
}
}
private byte[] drainInputStream(InputStream in) throws IOException {
try {
return ByteStreams.toByteArray(in);
} finally {
in.close();
}
}
private String getResponseUtf8Content(InputStream in) {
String res;
StringBuilder sb = new StringBuilder();
String line;
try {
BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8"));
while ((line = reader.readLine()) != null) {
// TODO freynaud bug ?
sb.append(line);/* .append("\n") */
}
in.close();
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new RuntimeException(e);
}
res = sb.toString();
return res;
}
private void processResponseHeaders(HttpServletRequest request, HttpServletResponse response,
URL remoteURL, HttpResponse proxyResponse)
throws MalformedURLException {
String pathSpec = request.getServletPath() + request.getContextPath();
for (Header header : proxyResponse.getAllHeaders()) {
String name = header.getName();
String value = header.getValue();
// HttpEntity#getContent() chews up the chunk-size octet (i.e., the InputStream does not
// actually map 1:1 to the underlying response body). This breaks any client expecting the
// chunk size. We could
// try to recreate it, but since the chunks are already read in and decoded, you'd end up with
// a
// single chunk, which isn't all that useful. So, we return the response as a traditional
// response with a
// Content-Length header, obviating the need for the Transfer-Encoding header.
if (name.equalsIgnoreCase("Transfer-Encoding") && value.equalsIgnoreCase("chunked")) {
continue;
}
// the location needs to point to the hub that will proxy
// everything.
if (name.equalsIgnoreCase("Location")) {
URL returnedLocation = new URL(value);
String driverPath = remoteURL.getPath();
String wrongPath = returnedLocation.getPath();
String correctPath = wrongPath.replace(driverPath, "");
Hub hub = slot.getProxy().getRegistry().getHub();
String location = "http://" + hub.getHost() + ":" + hub.getPort() + pathSpec + correctPath;
response.setHeader(name, location);
} else {
response.setHeader(name, value);
}
}
}
/**
* Allow you to retrieve an object previously stored on the test session.
*
* @return the object you stored
*/
public Object get(String key) {
return objects.get(key);
}
/**
* Allows you to store an object on the test session.
*
* @param key a non-null string
*/
public void put(String key, Object value) {
objects.put(key, value);
}
/**
* Sends a DELETE/testComplete (webdriver/selenium) session command to the remote, following web
* driver protocol.
*
* @return true is the remote replied successfully to the request.
*/
public boolean sendDeleteSessionRequest() {
URL remoteURL = slot.getRemoteURL();
HttpRequest request;
switch (slot.getProtocol()) {
case Selenium:
request =
new BasicHttpRequest("POST", remoteURL.toExternalForm()
+ "/?cmd=testComplete&sessionId=" + getExternalKey()
.getKey());
break;
case WebDriver:
String uri = remoteURL.toString() + "/session/" + externalKey;
request = new BasicHttpRequest("DELETE", uri);
break;
default:
throw new GridException("Error, protocol not implemented.");
}
HttpHost host = new HttpHost(remoteURL.getHost(), remoteURL.getPort());
HttpEntity responseBody = null;
boolean ok;
try {
HttpClient client = getClient();
HttpResponse response = client.execute(host, request);
responseBody = response.getEntity();
int code = response.getStatusLine().getStatusCode();
ok = (code >= 200) && (code <= 299);
} catch (Throwable e) {
ok = false;
// corrupted or the something else already sent the DELETE.
log.severe("Error releasing. Server corrupted ?");
}finally{
try {
EntityUtils.consume(responseBody);
} catch (IOException e) {
log.warning("Consuming the response body when DELETE to the node" + e.getMessage());
}
}
return ok;
}
/**
* allow to bypass time out for this session. ignore = true => the session will not time out.
* setIgnoreTimeout(true) also update the lastActivity to now.
*/
public void setIgnoreTimeout(boolean ignore) {
if (!ignore) {
lastActivity = timeSource.currentTimeInMillis();
}
this.ignoreTimeout = ignore;
}
public boolean isForwardingRequest() {
return forwardingRequest;
}
}
| |
/*
* DynamicJava - Copyright (C) 1999-2001
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files
* (the "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to permit
* persons to whom the Software is furnished to do so, subject to the
* following conditions:
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL DYADE BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Except as contained in this notice, the name of Dyade shall not be
* used in advertising or otherwise to promote the sale, use or other
* dealings in this Software without prior written authorization from
* Dyade.
*
*/
/* Generated By:JavaCC: Do not edit this line. ParserConstants.java */
package koala.dynamicjava.parser;
/**
 * Token-kind and lexical-state constants for the DynamicJava parser.
 *
 * This file is generated by JavaCC (see the header comment above) — the numeric
 * values are tied to the grammar and to {@code tokenImage} below, so they must
 * not be edited by hand.
 */
public interface ParserConstants {
  // End of input.
  int EOF = 0;
  // Comment token kinds.
  int SINGLE_LINE_COMMENT = 10;
  int FORMAL_COMMENT = 11;
  int MULTI_LINE_COMMENT = 12;
  // Java keyword token kinds.
  int ABSTRACT = 14;
  int BOOLEAN = 15;
  int BREAK = 16;
  int BYTE = 17;
  int CASE = 18;
  int CATCH = 19;
  int CHAR = 20;
  int CLASS = 21;
  int CONST = 22;
  int CONTINUE = 23;
  int _DEFAULT = 24;
  int DO = 25;
  int DOUBLE = 26;
  int ELSE = 27;
  int EXTENDS = 28;
  int FALSE = 29;
  int FINAL = 30;
  int FINALLY = 31;
  int FLOAT = 32;
  int FOR = 33;
  int GOTO = 34;
  int IF = 35;
  int IMPLEMENTS = 36;
  int IMPORT = 37;
  int INSTANCEOF = 38;
  int INT = 39;
  int INTERFACE = 40;
  int LONG = 41;
  int NATIVE = 42;
  int NEW = 43;
  int NULL = 44;
  int PACKAGE = 45;
  int PRIVATE = 46;
  int PROTECTED = 47;
  int PUBLIC = 48;
  int RETURN = 49;
  int SHORT = 50;
  int STATIC = 51;
  int SUPER = 52;
  int SWITCH = 53;
  int SYNCHRONIZED = 54;
  int THIS = 55;
  int THROW = 56;
  int THROWS = 57;
  int TRANSIENT = 58;
  int TRUE = 59;
  int TRY = 60;
  int VOID = 61;
  int VOLATILE = 62;
  int WHILE = 63;
  // Literal token kinds.
  int INTEGER_LITERAL = 64;
  int LONG_LITERAL = 65;
  int DECIMAL_LITERAL = 66;
  int HEX_LITERAL = 67;
  int OCTAL_LITERAL = 68;
  int FLOAT_LITERAL = 69;
  int DOUBLE_LITERAL = 70;
  int EXPONENT = 71;
  int CHARACTER_LITERAL = 72;
  int STRING_LITERAL = 73;
  // Identifier and its character classes.
  int IDENTIFIER = 74;
  int LETTER = 75;
  int DIGIT = 76;
  // Separator token kinds.
  int LPAREN = 77;
  int RPAREN = 78;
  int LBRACE = 79;
  int RBRACE = 80;
  int LBRACKET = 81;
  int RBRACKET = 82;
  int SEMICOLON = 83;
  int COMMA = 84;
  int DOT = 85;
  // Operator token kinds.
  int ASSIGN = 86;
  int GREATER_THAN = 87;
  int LESS = 88;
  int BANG = 89;
  int TILDE = 90;
  int HOOK = 91;
  int COLON = 92;
  int EQUAL = 93;
  int LESS_OR_EQUAL = 94;
  int GREATER_OR_EQUAL = 95;
  int NOT_EQUAL = 96;
  int CONDITIONAL_OR = 97;
  int CONDITIONAL_AND = 98;
  int INCREMENT = 99;
  int DECREMENT = 100;
  int PLUS = 101;
  int MINUS = 102;
  int STAR = 103;
  int SLASH = 104;
  int BITWISE_AND = 105;
  int BITWISE_OR = 106;
  int XOR = 107;
  int REMAINDER = 108;
  int LEFT_SHIFT = 109;
  int RIGHT_SIGNED_SHIFT = 110;
  int RIGHT_UNSIGNED_SHIFT = 111;
  int PLUS_ASSIGN = 112;
  int MINUS_ASSIGN = 113;
  int STAR_ASSIGN = 114;
  int SLASH_ASSIGN = 115;
  int AND_ASSIGN = 116;
  int OR_ASSIGN = 117;
  int XOR_ASSIGN = 118;
  int REMAINDER_ASSIGN = 119;
  int LEFT_SHIFT_ASSIGN = 120;
  int RIGHT_SIGNED_SHIFT_ASSIGN = 121;
  int RIGHT_UNSIGNED_SHIFTASSIGN = 122;
  // Lexical states of the generated token manager.
  int DEFAULT = 0;
  int IN_SINGLE_LINE_COMMENT = 1;
  int IN_FORMAL_COMMENT = 2;
  int IN_MULTI_LINE_COMMENT = 3;
  // Printable image of each token kind, indexed by the constants above.
  String[] tokenImage = {
    "<EOF>",
    "\" \"",
    "\"\\t\"",
    "\"\\n\"",
    "\"\\r\"",
    "\"\\f\"",
    "\"//\"",
    "\"#\"",
    "<token of kind 8>",
    "\"/*\"",
    "<SINGLE_LINE_COMMENT>",
    "\"*/\"",
    "\"*/\"",
    "<token of kind 13>",
    "\"abstract\"",
    "\"boolean\"",
    "\"break\"",
    "\"byte\"",
    "\"case\"",
    "\"catch\"",
    "\"char\"",
    "\"class\"",
    "\"const\"",
    "\"continue\"",
    "\"default\"",
    "\"do\"",
    "\"double\"",
    "\"else\"",
    "\"extends\"",
    "\"false\"",
    "\"final\"",
    "\"finally\"",
    "\"float\"",
    "\"for\"",
    "\"goto\"",
    "\"if\"",
    "\"implements\"",
    "\"import\"",
    "\"instanceof\"",
    "\"int\"",
    "\"interface\"",
    "\"long\"",
    "\"native\"",
    "\"new\"",
    "\"null\"",
    "\"package\"",
    "\"private\"",
    "\"protected\"",
    "\"public\"",
    "\"return\"",
    "\"short\"",
    "\"static\"",
    "\"super\"",
    "\"switch\"",
    "\"synchronized\"",
    "\"this\"",
    "\"throw\"",
    "\"throws\"",
    "\"transient\"",
    "\"true\"",
    "\"try\"",
    "\"void\"",
    "\"volatile\"",
    "\"while\"",
    "<INTEGER_LITERAL>",
    "<LONG_LITERAL>",
    "<DECIMAL_LITERAL>",
    "<HEX_LITERAL>",
    "<OCTAL_LITERAL>",
    "<FLOAT_LITERAL>",
    "<DOUBLE_LITERAL>",
    "<EXPONENT>",
    "<CHARACTER_LITERAL>",
    "<STRING_LITERAL>",
    "<IDENTIFIER>",
    "<LETTER>",
    "<DIGIT>",
    "\"(\"",
    "\")\"",
    "\"{\"",
    "\"}\"",
    "\"[\"",
    "\"]\"",
    "\";\"",
    "\",\"",
    "\".\"",
    "\"=\"",
    "\">\"",
    "\"<\"",
    "\"!\"",
    "\"~\"",
    "\"?\"",
    "\":\"",
    "\"==\"",
    "\"<=\"",
    "\">=\"",
    "\"!=\"",
    "\"||\"",
    "\"&&\"",
    "\"++\"",
    "\"--\"",
    "\"+\"",
    "\"-\"",
    "\"*\"",
    "\"/\"",
    "\"&\"",
    "\"|\"",
    "\"^\"",
    "\"%\"",
    "\"<<\"",
    "\">>\"",
    "\">>>\"",
    "\"+=\"",
    "\"-=\"",
    "\"*=\"",
    "\"/=\"",
    "\"&=\"",
    "\"|=\"",
    "\"^=\"",
    "\"%=\"",
    "\"<<=\"",
    "\">>=\"",
    "\">>>=\"",
  };
}
| |
/**
* Copyright 2014 Thomas Naeff (github.com/thnaeff)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ch.thn.guiutil.component.imageanimation;
import java.awt.image.BufferedImage;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import ch.thn.guiutil.ImageUtil;
import ch.thn.guiutil.effects.imageanimation.ImageAnimation;
/**
 * This is the base class for a {@link JLabel} whose icon can be animated.<br />
 * The icon has to be set with the standard {@link #setIcon(Icon)} method.<br />
 * See {@link ImageAnimationLabelFading} for an implementation example.
 *
 *
 * @author Thomas Naeff (github.com/thnaeff)
 *
 * @param <A> the concrete animation type driving this label
 *
 */
public abstract class ImageAnimationLabel<A extends ImageAnimation<?>> extends JLabel {
  private static final long serialVersionUID = -728553274740471716L;
  // The animation driving this label; expected to be provided by the subclass.
  private A imageAnimation = null;
  // Off-screen image the animation renders into; recreated when the icon size changes.
  private BufferedImage bufferedImage = null;
  // The icon exactly as passed to setIcon(), returned by getOriginalIcon().
  private Icon originalIcon = null;
  // The icon converted to an ImageIcon, used for drawing/animation.
  private ImageIcon imageIcon = null;
  // Set by setIcon(), cleared by animate() once the label shows the animation image.
  private boolean iconChanged = true;
  /**
   * Creates an empty animation label.
   *
   * @see JLabel
   */
  public ImageAnimationLabel() {
    super();
    init();
  }
  /**
   * Creates an animation label with the given icon.
   *
   * @param icon the icon to display
   * @see JLabel
   */
  public ImageAnimationLabel(Icon icon) {
    super(icon);
    init();
  }
  /**
   * Creates an animation label with the given text.
   *
   * @param text the label text
   * @see JLabel
   */
  public ImageAnimationLabel(String text) {
    super(text);
    init();
  }
  /**
   * Creates an animation label with the given text and alignment.
   *
   * @param text the label text
   * @param horizontalAlignment one of the SwingConstants alignment values
   * @see JLabel
   */
  public ImageAnimationLabel(String text, int horizontalAlignment) {
    super(text, horizontalAlignment);
    init();
  }
  /**
   * Creates an animation label with the given icon and alignment.
   *
   * @param icon the icon to display
   * @param horizontalAlignment one of the SwingConstants alignment values
   * @see JLabel
   */
  public ImageAnimationLabel(Icon icon, int horizontalAlignment) {
    super(icon, horizontalAlignment);
    init();
  }
  /**
   * Creates an animation label with the given text, icon and alignment.
   *
   * @param text the label text
   * @param icon the icon to display
   * @param horizontalAlignment one of the SwingConstants alignment values
   * @see JLabel
   */
  public ImageAnimationLabel(String text, Icon icon, int horizontalAlignment) {
    super(text, icon, horizontalAlignment);
    init();
  }
  //TODO
  /**
   * Returns the animation object driving this label, or null if not yet set.
   */
  public A getImageAnimation() {
    return imageAnimation;
  }
  /**
   * Starts the animation thread.
   *
   * NOTE(review): this is called from every constructor, at which point
   * {@code imageAnimation} cannot yet have been assigned via
   * {@link #setImageAnimation(Object)} (a subclass cannot run code before the
   * super constructor), so as written these constructors would throw. Presumably
   * subclasses override init() to set the animation first — TODO confirm against
   * the concrete subclasses (e.g. ImageAnimationLabelFading).
   */
  protected void init() {
    if (imageAnimation == null) {
      throw new NullPointerException("Image animation not set");
    }
    // Run the animation on its own daemonless thread, named after the concrete class.
    Thread t = new Thread(imageAnimation);
    t.setName(this.getClass().getSimpleName());
    t.start();
  }
  /**
   * Sets the animation object that drives this label.
   *
   * @param imageAnimation the animation to use
   */
  protected void setImageAnimation(A imageAnimation) {
    this.imageAnimation = imageAnimation;
  }
  /**
   * Runs the animation for the given number of loops. Does nothing if no icon
   * has been set.
   *
   * @param loops number of animation loops to run
   */
  public void animate(int loops) {
    if (imageIcon == null) {
      return;
    }
    //Create a new image only if necessary
    if (bufferedImage == null
        || bufferedImage.getWidth() != imageIcon.getIconWidth()
        || bufferedImage.getHeight() != imageIcon.getIconHeight()) {
      bufferedImage = new BufferedImage(imageIcon.getIconWidth(), imageIcon.getIconHeight(), BufferedImage.TYPE_INT_ARGB);
      imageAnimation.setOutputImage(bufferedImage);
    }
    if (iconChanged) {
      // Point the label at the animation's output image so frames become visible.
      super.setIcon(new ImageIcon(imageAnimation.getOutputImage()));
      iconChanged = false;
    }
    imageAnimation.go(loops);
  }
  /**
   * Pauses the animation
   *
   * @param pause true to pause, false to resume
   */
  public void pause(boolean pause) {
    imageAnimation.pause(pause);
  }
  /**
   * A flag which indicates if the icon has been changed with {@link #setIcon(Icon)}
   *
   * @return true if the icon changed since the last call to animate()
   */
  protected boolean hasIconChanged() {
    return iconChanged;
  }
  /**
   * Defines the icon this component will display. Setting a icon will stop the
   * animation.
   */
  @Override
  public void setIcon(Icon icon) {
    //setIcon is also called when constructing JLabel -> imageAnimation might not
    //be initialized. Only pause when initialized and not paused yet.
    if (imageAnimation != null && !imageAnimation.isPaused()) {
      //Pause the animation wherever it is
      imageAnimation.pause(true);
    }
    //Save icon to be able to return it with getIcon
    this.originalIcon = icon;
    //This ImageIcon is used for drawing
    this.imageIcon = ImageUtil.iconToImageIcon(icon);
    iconChanged = true;
    super.setIcon(icon);
  }
  /**
   * Returns the original icon which has been set with {@link #setIcon(Icon)}
   *
   * @return the unmodified icon passed to setIcon
   */
  public Icon getOriginalIcon() {
    return originalIcon;
  }
  /**
   * Image to animate
   *
   * @return the ImageIcon used internally for drawing
   */
  protected ImageIcon getImageIcon() {
    return imageIcon;
  }
  /**
   * Returns the icon that the label displays.<br />
   * <br />
   * Note: This icon is not the same object as the one set with {@link #setIcon(Icon)}.
   * This {@link ImageAnimationLabel} uses an internally created buffered image
   * to draw the animations on. Use {@link #getOriginalIcon()} to get the icon
   * which has been set with {@link #setIcon(Icon)}
   */
  @Override
  public Icon getIcon() {
    //Just a method override to provide additional javadoc information
    return super.getIcon();
  }
}
| |
package genecode.gene;
import genecode.Context;
import genecode.Genome;
import genecode.function.Function;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
 * A gene which wraps a function call directly.
 */
public class FunctionGene
    extends AbstractGene
{
    // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    /**
     * Our function.
     */
    private Function myFunction;
    /**
     * The genes which we refer to as arguments, by their handles.
     */
    private List<Gene.Handle> myArgs;
    /**
     * The genes which we refer to as arguments, cached. Invalidated whenever
     * myArgs changes or the genome passed in differs from myArgGenesGenome.
     */
    private Gene[] myArgGenes;
    /**
     * The genome associated with the cache.
     */
    private Genome myArgGenesGenome;
    /**
     * A place to hold the values which we pass to the function.
     * Reused across calls to avoid per-evaluation allocation.
     */
    private Object[] myValues;
    // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    /**
     * CTOR.
     *
     * @param function The function which we wrap.
     */
    public FunctionGene(final Function function)
    {
        super(function.getReturnType());
        myFunction = function;
        myArgs = new ArrayList<>(function.getArgTypes().size());
        myArgGenes = null;
        myArgGenesGenome = null;
        myValues = new Object[function.getArgTypes().size()];
    }
    /**
     * Set the arguments. We use an array since order may be important.
     *
     * <p>Generally speaking you should not call this method. It's
     * mainly here to facilitate testing.
     *
     * @param args The arguments to set for this gene.
     *
     * @throws IllegalArgumentException If the args violate the gene's
     *                                  constraints.
     */
    public void setArgs(final Gene.Handle... args)
        throws IllegalArgumentException
    {
        if (args == null) {
            throw new IllegalArgumentException(
                "Given a null list of args"
            );
        }
        final List<Class<?>> argTypes = myFunction.getArgTypes();
        if (args.length != argTypes.size()) {
            throw new IllegalArgumentException(
                "Number of arguments, " + args.length + ", " +
                "did not match the expected number, " + argTypes.size()
            );
        }
        for (Gene.Handle arg : args) {
            if (arg == null) {
                throw new IllegalArgumentException(
                    "Args had a null value: " + args
                );
            }
        }
        // Safe to do. Flush any cached evaluation state, replace the
        // argument handles, and drop the now-stale gene cache.
        flush();
        myArgs.clear();
        myArgs.addAll(Arrays.asList(args));
        myArgGenes = null;
        myArgGenesGenome = null;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public int getGraphSize(final Genome genome)
    {
        // Looped? (A gene graph may contain cycles; the flag below detects
        // re-entering this node during the same traversal.)
        if (myInGetGraphSize) {
            // If we just became re-entrant then there is nothing more
            // to see here. We return 1 since, though we have counted
            // ourselves already, we want to account for the fact that
            // we are a node that's referred to by another function.
            // This is a bit of a cheat but sort of makes semantic
            // sense.
            return 1;
        }
        myInGetGraphSize = true;
        try {
            int size = super.getGraphSize(genome);
            for (int i=0; i < myArgs.size(); i++) {
                final Gene gene = getArg(i, genome);
                // A missing argument still counts as one node.
                size += (gene == null) ? 1 : gene.getGraphSize(genome);
            }
            return size;
        }
        finally {
            myInGetGraphSize = false;
        }
    }
    // Re-entrancy guard for getGraphSize(); not thread-safe by design.
    private boolean myInGetGraphSize = false;
    /**
     * {@inheritDoc}
     */
    @Override
    public void getGraphHandles(final Genome genome,
                                final List<Gene.Handle> dest)
    {
        // Looped?
        if (myInGetGraphHandles) {
            // Like getGraphSize() we handle reentrancy by adding this node
            dest.add(getHandle());
            return;
        }
        myInGetGraphHandles = true;
        try {
            super.getGraphHandles(genome, dest);
            for (int i=0; i < myArgs.size(); i++) {
                final Gene gene = getArg(i, genome);
                if (gene == null) {
                    // The gene is missing from the genome; record its handle anyway.
                    dest.add(myArgs.get(i));
                }
                else {
                    gene.getGraphHandles(genome, dest);
                }
            }
        }
        finally {
            myInGetGraphHandles = false;
        }
    }
    // Re-entrancy guard for getGraphHandles(); not thread-safe by design.
    private boolean myInGetGraphHandles = false;
    /**
     * {@inheritDoc}
     */
    @Override
    public void mutate(final Genome genome,
                       final double factor)
    {
        // Possibly change an input
        if (!myArgs.isEmpty() && Math.random() < factor) {
            // Pick one and change it to any type-compatible handle in the genome.
            // NOTE(review): the myArgGenes cache is not invalidated here; getArg()
            // only rebuilds it when the genome instance differs - presumably
            // mutation is followed by evaluation against a different genome, or
            // flush() elsewhere - confirm.
            final int index = (int)(Math.random() * myArgs.size());
            myArgs.set(
                index,
                genome.pickAnyHandle(
                    myFunction.getArgTypes().get(index)
                )
            );
        }
    }
    /**
     * {@inheritDoc}
     *
     * <p>Missing genes will be represented as {@code <?>}.
     */
    @Override
    public String toString(final Genome genome)
    {
        final StringBuilder sb = new StringBuilder();
        sb.append(myFunction).append("(");
        if (myInToString) {
            sb.append("<LOOPS>");
        }
        else {
            myInToString = true;
            try {
                for (int i=0; i < myArgs.size(); i++) {
                    if (i > 0) {
                        sb.append(",");
                    }
                    final Gene gene = getArg(i, genome);
                    if (gene != null) {
                        sb.append(gene.toString(genome));
                    }
                    else {
                        sb.append("<?>");
                    }
                }
            }
            finally {
                myInToString = false;
            }
        }
        sb.append(')');
        return sb.toString();
    }
    // Re-entrancy guard for toString(); cycles render as "<LOOPS>".
    private boolean myInToString = false;
    /**
     * {@inheritDoc}
     *
     * <p>NOTE(review): hashCode() is not overridden here; presumably
     * AbstractGene supplies one consistent with this equals() - confirm.
     */
    @Override
    public boolean equals(final Object o)
    {
        if (!super.equals(o)) {
            return false;
        }
        if (!(o instanceof FunctionGene)) {
            return false;
        }
        final FunctionGene that = (FunctionGene)o;
        if (!that.myFunction.equals(myFunction)) {
            return false;
        }
        if (!that.myArgs.equals(myArgs)) {
            return false;
        }
        return true;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Gene clone()
    {
        // Deep-copy mutable state so the clone evolves independently;
        // the gene cache is deliberately left cold.
        final FunctionGene gene = (FunctionGene)super.clone();
        gene.myFunction = myFunction.clone();
        gene.myArgs = new ArrayList<>(gene.myArgs);
        gene.myArgGenes = null;
        gene.myArgGenesGenome = null;
        gene.myValues = new Object[myValues.length];
        return gene;
    }
    /**
     * {@inheritDoc}
     *
     * <p>We accumulate the minimum number of arguments.
     */
    @Override
    protected void safeInit(final Genome genome)
        throws IllegalStateException
    {
        for (int i=0; i < myFunction.getArgTypes().size(); i++) {
            myArgs.add(
                genome.pickAnyHandle(myFunction.getArgTypes().get(i))
            );
        }
    }
    /**
     * {@inheritDoc}
     */
    @Override
    protected Object safeEvaluate(final Context context,
                                  final Genome genome)
    {
        // Populate the arguments for the function. Any missing or
        // null-valued argument short-circuits the whole evaluation to null.
        for (int i=0; i < myValues.length; i++) {
            final Gene gene = getArg(i, genome);
            if (gene == null) {
                return null;
            }
            myValues[i] = gene.evaluate(context, genome);
            if (myValues[i] == null) {
                return null;
            }
        }
        // Hand off
        return myFunction.call(myValues);
    }
    /**
     * Get the gene argument with the given index from the genome, if
     * it exists.
     *
     * @param index  The index of the argument to get.
     * @param genome The genome to get the argument from.
     *
     * @return The gene or {@code null} if it was not found.
     */
    private Gene getArg(final int index,
                        final Genome genome)
    {
        if (index < 0 || index >= myArgs.size()) {
            return null;
        }
        else {
            // (Re)build the cache?
            if (myArgGenesGenome != genome) { // pointer compare
                myArgGenes = null;
            }
            if (myArgGenes == null) {
                myArgGenes = new Gene[myArgs.size()];
                for (int i=0; i < myArgs.size(); i++) {
                    myArgGenes[i] = genome.get(myArgs.get(i));
                }
                myArgGenesGenome = genome;
            }
            // Grab from the cache
            return myArgGenes[index];
        }
    }
}
| |
package edu.ucdavis.cacheca;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
// @author Originally written in C++ by Zhaopeng; converted to Java by Christine
public class Cache {
private Map<String, Record> mRecords;
private int mMinOrder;
private int mOrder;
/**
 * Builds an empty cache configured for n-gram orders between
 * {@code minOrder} and {@code order}.
 *
 * @param order    the maximum n-gram order used when recording prefixes
 * @param minOrder the minimum n-gram order considered
 */
public Cache(int order, int minOrder) {
    init(order, minOrder);
    mRecords = new HashMap<>();
}
/** Records the n-gram order bounds used when building and querying the cache. */
private void init(int order, int minOrder) {
    mOrder = order;
    mMinOrder = minOrder;
}
void clear(){
mRecords.clear();
}
void build(String inputFile){
ArrayList<String> tokens = new ArrayList<String>();
tokens.add("<s>");
String line, cachePrefix;
BufferedReader br;
try {
br = new BufferedReader(new FileReader(inputFile));
line = br.readLine();
while(line != null){
String[] splitString = line.split("((?<=\\.)|(?=\\.))| |((?<=\\{)|(?=\\{))|((?<=\\()|(?=\\())|((?<=\\[)|(?=\\[))|((?<=\\;)|(?=\\;))");
for(String token : splitString){
if (token != null && !(token.equals(""))){
token.trim();
token = token.replaceAll("\t", "");
token = token.replaceAll(" ", "");
tokens.add(token);
}
}
line = br.readLine();
}
br.close();
} catch (FileNotFoundException e) { e.printStackTrace(); }
catch (IOException e) { e.printStackTrace(); }
for(int i = mMinOrder-1; i < tokens.size(); i++){
int start = i-(mOrder-1) > 0 ? i-(mOrder-1) : 0;
int end = i-1;
StringBuilder mergedString = new StringBuilder();
if (start < end){
for(int j = start; j < end; j++){
mergedString.append(tokens.get(j) + " ");
}
mergedString.append(tokens.get(end));
}
cachePrefix = mergedString.toString();
update(cachePrefix, tokens.get(i));
}
}
void update(String prefix, String token){
int n = Utilities.countWords(prefix);
for(int i = n; i >= mMinOrder-1; --i){
String newPrefix = Utilities.getLastNWords(prefix, i);
Record val = mRecords.get(newPrefix);
if (val != null){
val.update(token);
}
else{
mRecords.put(newPrefix, new Record(token));
}
}
}
public ArrayList<Word> updateCandidates(String prefix, ArrayList<Word> candidates){
int cacheCount = getCount(prefix);
if(cacheCount != 0){
float cache_discount = (float)cacheCount/(cacheCount+1);
float ngram_discount = 1-cache_discount;
// found cache records of the prefix
Map<String, Integer> tokenCounts = getTokenCounts(prefix);
// update the information of candidates from ngram model
for (int i=0; i<(int)candidates.size(); ++i)
{
// discount the probability first
candidates.get(i).mProb *= ngram_discount;
Integer val = tokenCounts.get(candidates.get(i).mToken);
if (val != null)
{
candidates.get(i).mProb += cache_discount * val/cacheCount;
tokenCounts.remove(candidates.get(i).mToken);
}
}
// add the left records in the cache to the candidates
// See http://stackoverflow.com/questions/46898/how-do-i-iterate-over-each-entry-in-a-map
for (Map.Entry<String, Integer> entry : tokenCounts.entrySet())
{
candidates.add(new Word(entry.getKey(), cache_discount * ((float)entry.getValue()/cacheCount)));
}
// See: http://stackoverflow.com/questions/890254/how-can-i-sort-this-arraylist-the-way-that-i-want
Collections.sort(candidates, new Comparator<Word>() {
// first less than the second = neg, first greater than second = pos
@Override
public int compare(Word one, Word two) {
if(one.mProb > two.mProb)
return 1;
else if(one.mProb < two.mProb)
return -1;
return 0;
}
});
}
return candidates;
}
/**
* get the possible suggestions from the cache
* @param prefix the previous (n-1) tokens
* @return
**/
Map<String, Integer> getTokenCounts(String prefix)
{
int n = Utilities.countWords(prefix);
for (int i=n; i>=mMinOrder-1; --i)
{
// use the prefix from longest to m_min_order until we match the prefix
String newPrefix = Utilities.getLastNWords(prefix, i);
Record val = mRecords.get(newPrefix);
if (val != null)
{
return val.getTokenCounts();
}
}
return null;
}
/**
* get the number of records for a given prefix (to calculate the discount)
* @param prefix the previous (n-1) tokens
**/
int getCount(String prefix)
{
int n = Utilities.countWords(prefix);
for (int i=n; i>=mMinOrder-1; --i)
{
String newPrefix = Utilities.getLastNWords(prefix, i);
Record val = mRecords.get(newPrefix);
if (val != null)
{
return val.getCount();
}
}
return 0;
}
//inner class "Record"
private class Record{
private int mCount;
private Map<String, Integer> mTokens;
private Record (){
mCount = 0;
mTokens = new HashMap<String, Integer>();
}
private Record (String token){
mCount = 1;
mTokens = new HashMap<String, Integer>();
mTokens.put(token, 1);
}
void update(String token){
mCount++;
Integer val = mTokens.get(token);
if(val != null){
mTokens.put(token, ++val);
}
else{
mTokens.put(token, 1);
}
}
Map<String, Integer> getTokenCounts(){
return mTokens;
}
int getCount(){
return mCount;
}
}
}
| |
package com.mentor.nucleus.bp.welcome.test;
//=====================================================================
//
//File: $RCSfile: WelcomePageTestMetamodel.java,v $
//Version: $Revision: 1.6 $
//Modified: $Date: 2013/01/10 23:05:14 $
//
//(c) Copyright 2004-2014 by Mentor Graphics Corp. All rights reserved.
//
//=====================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//=====================================================================
import java.util.Properties;
import junit.framework.TestCase;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.TreeItem;
import org.eclipse.ui.IViewPart;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.PlatformUI;
import com.mentor.nucleus.bp.core.XtUMLNature;
import com.mentor.nucleus.bp.core.common.PersistableModelComponent;
import com.mentor.nucleus.bp.core.common.PersistenceManager;
import com.mentor.nucleus.bp.utilities.ui.TreeUtilities;
import com.mentor.nucleus.bp.test.TestUtil;
import com.mentor.nucleus.bp.test.common.TestingUtilities;
import com.mentor.nucleus.bp.ui.explorer.ExplorerView;
import com.mentor.nucleus.bp.welcome.gettingstarted.SampleProjectGettingStartedAction;
public class WelcomePageTestMetamodel extends TestCase {
    private static IProject project;
    private static IViewPart g_view = null;
    // This test is set up so we can swap-in a different test model
    // if we choose to do so. All we should need to do is change the
    // name of the model here.
    private final String ProjectName = "xtUML_Metamodel";
    // Files spot-checked after the sample project has been imported.
    private String[] expectedXtUMLFiles = {
        "models/" + ProjectName + "/xtUML_Metamodel.xtuml",
        "models/" + ProjectName + "/ooaofooa/Activity/Activity.xtuml",
        "models/" + ProjectName + "/ooaofooa/Domain/Domain.xtuml",
        "models/" + ProjectName + "/ooaofooa/Component/Component Nesting/Component Nesting.xtuml",
        "models/" + ProjectName + "/ooaofooa/Value/Value.xtuml"};
    private String[] expectedFiles = expectedXtUMLFiles;
    public WelcomePageTestMetamodel() {
        super();
    }
    /**
     * Run the welcome-page action that imports the sample model as a
     * single-file project.
     */
    public void runSingleFileGettingStartedAction() {
        SampleProjectGettingStartedAction action = new SampleProjectGettingStartedAction();
        Properties props = new Properties();
        props.put("model", ProjectName);
        props.put("SingleFileModel", "true");
        action.run(null, props);
    }
    /**
     * Assert that the named project exists in the workspace and is open.
     *
     * @return true when the project exists and is open (the assertions
     *         fail the test otherwise)
     */
    public boolean projectExists(String projectName) {
        boolean projectExists = false;
        project = ResourcesPlugin.getWorkspace().getRoot().getProject(
                projectName);
        projectExists = project.exists();
        assertTrue("Project: " + projectName + " does not exist.",
                projectExists);
        projectExists = project.isOpen();
        assertTrue("Project: " + projectName + " is not open.", projectExists);
        return projectExists;
    }
    /** Assert that the given project carries the xtUML nature. */
    public void isxtUMLProject(IProject project) {
        try {
            assertTrue("Project: " + project.getName()
                    + " is not an xtUML project.", project
                    .hasNature(XtUMLNature.ID));
        } catch (CoreException e) {
            // Fail loudly: the original only printed the stack trace,
            // which let the check pass spuriously when natures could
            // not be read.
            fail("Could not read natures of project " + project.getName()
                    + ": " + e.getMessage());
        }
    }
    /** Spot check that the expected model files exist in the project. */
    public void containsProjectMembers() {
        for (int i = 0; i < expectedFiles.length; i++) {
            IFile file = project.getFile(expectedFiles[i]);
            assertTrue("Expected file: " + file.getName() + " does not exist.",
                    file.exists());
        }
    }
    public void verifyProjectCreated() {
        boolean projectExists = projectExists(ProjectName);
        if (projectExists)
            containsProjectMembers();
    }
    /** Bring the workbench window to the front so dialogs are visible. */
    public void raiseWorkbench() {
        Shell s = PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell();
        s.forceActive();
        s.forceFocus();
    }
    public void testProjectCreation() {
        runSingleFileGettingStartedAction();
        // Give the import time to work
        TestUtil.sleepWithDispatchOfEvents(7000);
        verifyProjectCreated();
        raiseWorkbench();
    }
    public void testNoProjectOverwrite() {
        IFile dummyFile = project.getFile("dummyFile");
        IFile existingFile = project.getFile(expectedXtUMLFiles[0]);
        try {
            dummyFile.create(existingFile.getContents(), IResource.REPLACE, null);
        } catch (CoreException ce) {
            fail("Failed to create dummy file.");
        }
        if (!dummyFile.exists()) {
            fail("Failed to create the dummy file.");
        }
        // Answer "No" to the overwrite prompt raised by the action.
        TestUtil.selectButtonInDialog(2000, "No");
        runSingleFileGettingStartedAction();
        // We said not to overwrite, so the dummy file should still be there
        assertTrue("The project was overwritten when it shouldn't have been.",
                dummyFile.exists());
    }
    public void testProjectOverwrite() throws Exception {
        IFile dummyFile = project.getFile("dummyFile");
        // Make sure the marker file (created by the previous test) is there.
        assertTrue("The dummy file for testing doesn't exist.", dummyFile.exists());
        // Answer "Yes" to the overwrite prompt raised by the action.
        TestUtil.selectButtonInDialog(1000, "Yes");
        runSingleFileGettingStartedAction();
        // Give the import time to work
        TestUtil.sleepWithDispatchOfEvents(5000);
        // We said to overwrite, so the dummy file should not be there
        assertFalse("The project was not overwritten when it should have been.",
                dummyFile.exists());
        TestingUtilities.deleteProject(ProjectName);
    }
    /**
     * Import the sample project, load and re-persist it, and verify no
     * errors or orphaned elements appear at any stage.
     */
    public void testImportLoadPersist() throws Exception {
        int numImports = 1;
        for (int i = 0; i < numImports; i++) {
            System.out.println("Import number: " + String.valueOf(i+1));
            runSingleFileGettingStartedAction();
            TestingUtilities.allowJobCompletion();
            raiseWorkbench();
            verifyProjectCreated();
            final IProject project = getProject(ProjectName);
            checkForErrors();
            // load and persist
            PersistableModelComponent pmc = PersistenceManager.getRootComponent(project);
            pmc.loadComponentAndChildren(new NullProgressMonitor());
            pmc.persistSelfAndChildren();
            checkForErrors();
            TestingUtilities.deleteProject(ProjectName);
        }
    }
    /**
     * Fail the test if the problems view cannot be opened or the
     * explorer tree contains orphaned elements.
     */
    private void checkForErrors() {
        // Check the problems view
        g_view = selectView(project, "org.eclipse.ui.views.ProblemView");
        // Check the explorer view for orphaned elements
        ExplorerView view = null;
        try {
            view = (ExplorerView) PlatformUI.getWorkbench()
                    .getActiveWorkbenchWindow().getActivePage().showView(
                            "com.mentor.nucleus.bp.ui.explorer.ExplorerView");
        } catch (PartInitException e) {
            // Fail explicitly: the original swallowed this exception and
            // then crashed on the NullPointerException below.
            fail("Failed to open the Explorer view: " + e.getMessage());
        }
        view.getTreeViewer().refresh();
        while(PlatformUI.getWorkbench().getDisplay().readAndDispatch());
        view.getTreeViewer().expandAll();
        while(PlatformUI.getWorkbench().getDisplay().readAndDispatch());
        TreeItem topItem = view.getTreeViewer().getTree().getTopItem();
        TreeItem[] orphaned = TreeUtilities.getOrphanedElementsFromTree(topItem);
        if (orphaned.length > 0) {
            String elements = TreeUtilities.getTextResultForOrphanedElementList(orphaned);
            // fail(msg) instead of the assertTrue(msg, false) anti-pattern
            fail("Orphaned elements are present: " + elements);
        }
    }
    /** Fetch the named project from the workspace, asserting it exists. */
    private IProject getProject(String name) {
        IProject project = ResourcesPlugin.getWorkspace().getRoot().getProject(
                name);
        assertTrue( project.exists() );
        return project;
    }
    /** Open the given view in the active page, failing the test on error. */
    private IViewPart selectView(final IProject project, final String viewName) {
        g_view = null;
        Runnable r = new Runnable() {
            public void run() {
                IWorkbenchPage page = PlatformUI.getWorkbench()
                        .getActiveWorkbenchWindow().getActivePage();
                try {
                    g_view = page.showView(viewName);
                } catch (PartInitException e) {
                    fail("Failed to open the " + viewName + " view");
                }
            }
        };
        r.run();
        assertTrue("Unable to select view: " + viewName, g_view != null);
        return g_view;
    }
}
| |
package org.openapitools.model;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.openapitools.model.ClockDifference;
import org.openapitools.model.DiskSpaceMonitorDescriptorDiskSpace;
import org.openapitools.model.ResponseTimeMonitorData;
import org.openapitools.model.SwapSpaceMonitorMemoryUsage2;
/**
 * HudsonMasterComputermonitorData
 */
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaMSF4JServerCodegen", date = "2022-02-13T02:17:10.827152Z[Etc/UTC]")
public class HudsonMasterComputermonitorData {
  @JsonProperty("hudson.node_monitors.SwapSpaceMonitor")
  private SwapSpaceMonitorMemoryUsage2 hudsonNodeMonitorsSwapSpaceMonitor;
  @JsonProperty("hudson.node_monitors.TemporarySpaceMonitor")
  private DiskSpaceMonitorDescriptorDiskSpace hudsonNodeMonitorsTemporarySpaceMonitor;
  @JsonProperty("hudson.node_monitors.DiskSpaceMonitor")
  private DiskSpaceMonitorDescriptorDiskSpace hudsonNodeMonitorsDiskSpaceMonitor;
  @JsonProperty("hudson.node_monitors.ArchitectureMonitor")
  private String hudsonNodeMonitorsArchitectureMonitor;
  @JsonProperty("hudson.node_monitors.ResponseTimeMonitor")
  private ResponseTimeMonitorData hudsonNodeMonitorsResponseTimeMonitor;
  @JsonProperty("hudson.node_monitors.ClockMonitor")
  private ClockDifference hudsonNodeMonitorsClockMonitor;
  @JsonProperty("_class")
  private String propertyClass;

  /** Fluent variant of the swap-space monitor setter. */
  public HudsonMasterComputermonitorData hudsonNodeMonitorsSwapSpaceMonitor(SwapSpaceMonitorMemoryUsage2 hudsonNodeMonitorsSwapSpaceMonitor) {
    setHudsonNodeMonitorsSwapSpaceMonitor(hudsonNodeMonitorsSwapSpaceMonitor);
    return this;
  }
  /**
   * Get hudsonNodeMonitorsSwapSpaceMonitor
   * @return hudsonNodeMonitorsSwapSpaceMonitor
   **/
  @ApiModelProperty(value = "")
  public SwapSpaceMonitorMemoryUsage2 getHudsonNodeMonitorsSwapSpaceMonitor() {
    return hudsonNodeMonitorsSwapSpaceMonitor;
  }
  public void setHudsonNodeMonitorsSwapSpaceMonitor(SwapSpaceMonitorMemoryUsage2 hudsonNodeMonitorsSwapSpaceMonitor) {
    this.hudsonNodeMonitorsSwapSpaceMonitor = hudsonNodeMonitorsSwapSpaceMonitor;
  }

  /** Fluent variant of the temporary-space monitor setter. */
  public HudsonMasterComputermonitorData hudsonNodeMonitorsTemporarySpaceMonitor(DiskSpaceMonitorDescriptorDiskSpace hudsonNodeMonitorsTemporarySpaceMonitor) {
    setHudsonNodeMonitorsTemporarySpaceMonitor(hudsonNodeMonitorsTemporarySpaceMonitor);
    return this;
  }
  /**
   * Get hudsonNodeMonitorsTemporarySpaceMonitor
   * @return hudsonNodeMonitorsTemporarySpaceMonitor
   **/
  @ApiModelProperty(value = "")
  public DiskSpaceMonitorDescriptorDiskSpace getHudsonNodeMonitorsTemporarySpaceMonitor() {
    return hudsonNodeMonitorsTemporarySpaceMonitor;
  }
  public void setHudsonNodeMonitorsTemporarySpaceMonitor(DiskSpaceMonitorDescriptorDiskSpace hudsonNodeMonitorsTemporarySpaceMonitor) {
    this.hudsonNodeMonitorsTemporarySpaceMonitor = hudsonNodeMonitorsTemporarySpaceMonitor;
  }

  /** Fluent variant of the disk-space monitor setter. */
  public HudsonMasterComputermonitorData hudsonNodeMonitorsDiskSpaceMonitor(DiskSpaceMonitorDescriptorDiskSpace hudsonNodeMonitorsDiskSpaceMonitor) {
    setHudsonNodeMonitorsDiskSpaceMonitor(hudsonNodeMonitorsDiskSpaceMonitor);
    return this;
  }
  /**
   * Get hudsonNodeMonitorsDiskSpaceMonitor
   * @return hudsonNodeMonitorsDiskSpaceMonitor
   **/
  @ApiModelProperty(value = "")
  public DiskSpaceMonitorDescriptorDiskSpace getHudsonNodeMonitorsDiskSpaceMonitor() {
    return hudsonNodeMonitorsDiskSpaceMonitor;
  }
  public void setHudsonNodeMonitorsDiskSpaceMonitor(DiskSpaceMonitorDescriptorDiskSpace hudsonNodeMonitorsDiskSpaceMonitor) {
    this.hudsonNodeMonitorsDiskSpaceMonitor = hudsonNodeMonitorsDiskSpaceMonitor;
  }

  /** Fluent variant of the architecture monitor setter. */
  public HudsonMasterComputermonitorData hudsonNodeMonitorsArchitectureMonitor(String hudsonNodeMonitorsArchitectureMonitor) {
    setHudsonNodeMonitorsArchitectureMonitor(hudsonNodeMonitorsArchitectureMonitor);
    return this;
  }
  /**
   * Get hudsonNodeMonitorsArchitectureMonitor
   * @return hudsonNodeMonitorsArchitectureMonitor
   **/
  @ApiModelProperty(value = "")
  public String getHudsonNodeMonitorsArchitectureMonitor() {
    return hudsonNodeMonitorsArchitectureMonitor;
  }
  public void setHudsonNodeMonitorsArchitectureMonitor(String hudsonNodeMonitorsArchitectureMonitor) {
    this.hudsonNodeMonitorsArchitectureMonitor = hudsonNodeMonitorsArchitectureMonitor;
  }

  /** Fluent variant of the response-time monitor setter. */
  public HudsonMasterComputermonitorData hudsonNodeMonitorsResponseTimeMonitor(ResponseTimeMonitorData hudsonNodeMonitorsResponseTimeMonitor) {
    setHudsonNodeMonitorsResponseTimeMonitor(hudsonNodeMonitorsResponseTimeMonitor);
    return this;
  }
  /**
   * Get hudsonNodeMonitorsResponseTimeMonitor
   * @return hudsonNodeMonitorsResponseTimeMonitor
   **/
  @ApiModelProperty(value = "")
  public ResponseTimeMonitorData getHudsonNodeMonitorsResponseTimeMonitor() {
    return hudsonNodeMonitorsResponseTimeMonitor;
  }
  public void setHudsonNodeMonitorsResponseTimeMonitor(ResponseTimeMonitorData hudsonNodeMonitorsResponseTimeMonitor) {
    this.hudsonNodeMonitorsResponseTimeMonitor = hudsonNodeMonitorsResponseTimeMonitor;
  }

  /** Fluent variant of the clock monitor setter. */
  public HudsonMasterComputermonitorData hudsonNodeMonitorsClockMonitor(ClockDifference hudsonNodeMonitorsClockMonitor) {
    setHudsonNodeMonitorsClockMonitor(hudsonNodeMonitorsClockMonitor);
    return this;
  }
  /**
   * Get hudsonNodeMonitorsClockMonitor
   * @return hudsonNodeMonitorsClockMonitor
   **/
  @ApiModelProperty(value = "")
  public ClockDifference getHudsonNodeMonitorsClockMonitor() {
    return hudsonNodeMonitorsClockMonitor;
  }
  public void setHudsonNodeMonitorsClockMonitor(ClockDifference hudsonNodeMonitorsClockMonitor) {
    this.hudsonNodeMonitorsClockMonitor = hudsonNodeMonitorsClockMonitor;
  }

  /** Fluent variant of the _class setter. */
  public HudsonMasterComputermonitorData propertyClass(String propertyClass) {
    setPropertyClass(propertyClass);
    return this;
  }
  /**
   * Get propertyClass
   * @return propertyClass
   **/
  @ApiModelProperty(value = "")
  public String getPropertyClass() {
    return propertyClass;
  }
  public void setPropertyClass(String propertyClass) {
    this.propertyClass = propertyClass;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    // Field-by-field comparison via Objects.equals (null-safe).
    HudsonMasterComputermonitorData other = (HudsonMasterComputermonitorData) o;
    return Objects.equals(hudsonNodeMonitorsSwapSpaceMonitor, other.hudsonNodeMonitorsSwapSpaceMonitor)
        && Objects.equals(hudsonNodeMonitorsTemporarySpaceMonitor, other.hudsonNodeMonitorsTemporarySpaceMonitor)
        && Objects.equals(hudsonNodeMonitorsDiskSpaceMonitor, other.hudsonNodeMonitorsDiskSpaceMonitor)
        && Objects.equals(hudsonNodeMonitorsArchitectureMonitor, other.hudsonNodeMonitorsArchitectureMonitor)
        && Objects.equals(hudsonNodeMonitorsResponseTimeMonitor, other.hudsonNodeMonitorsResponseTimeMonitor)
        && Objects.equals(hudsonNodeMonitorsClockMonitor, other.hudsonNodeMonitorsClockMonitor)
        && Objects.equals(propertyClass, other.propertyClass);
  }

  @Override
  public int hashCode() {
    return Objects.hash(hudsonNodeMonitorsSwapSpaceMonitor, hudsonNodeMonitorsTemporarySpaceMonitor, hudsonNodeMonitorsDiskSpaceMonitor, hudsonNodeMonitorsArchitectureMonitor, hudsonNodeMonitorsResponseTimeMonitor, hudsonNodeMonitorsClockMonitor, propertyClass);
  }

  @Override
  public String toString() {
    StringBuilder builder = new StringBuilder();
    builder.append("class HudsonMasterComputermonitorData {\n");
    builder.append(" hudsonNodeMonitorsSwapSpaceMonitor: ").append(toIndentedString(hudsonNodeMonitorsSwapSpaceMonitor)).append("\n");
    builder.append(" hudsonNodeMonitorsTemporarySpaceMonitor: ").append(toIndentedString(hudsonNodeMonitorsTemporarySpaceMonitor)).append("\n");
    builder.append(" hudsonNodeMonitorsDiskSpaceMonitor: ").append(toIndentedString(hudsonNodeMonitorsDiskSpaceMonitor)).append("\n");
    builder.append(" hudsonNodeMonitorsArchitectureMonitor: ").append(toIndentedString(hudsonNodeMonitorsArchitectureMonitor)).append("\n");
    builder.append(" hudsonNodeMonitorsResponseTimeMonitor: ").append(toIndentedString(hudsonNodeMonitorsResponseTimeMonitor)).append("\n");
    builder.append(" hudsonNodeMonitorsClockMonitor: ").append(toIndentedString(hudsonNodeMonitorsClockMonitor)).append("\n");
    builder.append(" propertyClass: ").append(toIndentedString(propertyClass)).append("\n");
    builder.append("}");
    return builder.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(Object o) {
    return o == null ? "null" : o.toString().replace("\n", "\n ");
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.record.vector;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.Iterator;
import java.util.List;
import org.apache.drill.categories.VectorTest;
import org.apache.drill.common.util.DrillFileUtils;
import org.apache.drill.exec.client.DrillClient;
import org.apache.drill.exec.pop.PopUnitTestBase;
import org.apache.drill.exec.record.RecordBatchLoader;
import org.apache.drill.exec.record.VectorWrapper;
import org.apache.drill.exec.rpc.user.QueryDataBatch;
import org.apache.drill.exec.server.Drillbit;
import org.apache.drill.exec.server.RemoteServiceSet;
import org.apache.drill.exec.vector.ValueVector;
import org.apache.drill.categories.SlowTest;
import org.junit.Test;
import com.google.common.base.Charsets;
import com.google.common.io.Files;
import org.junit.experimental.categories.Category;
/* This class tests the existing date types. Simply using date types
 * by casting from VarChar, performing basic functions and converting
 * back to VarChar.
 */
@Category({SlowTest.class, VectorTest.class})
public class TestDateTypes extends PopUnitTestBase {

    /**
     * Execute the physical plan in {@code planResource} with its
     * #{TEST_FILE} placeholder pointed at {@code dataFile} and return
     * the result batches. Extracted because every test duplicated it.
     */
    private List<QueryDataBatch> runPhysicalQuery(DrillClient client,
            String planResource, String dataFile) throws Exception {
        return client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
                Files.toString(DrillFileUtils.getResourceAsFile(planResource), Charsets.UTF_8)
                        .replace("#{TEST_FILE}", dataFile));
    }

    /** Release every batch returned by a query. */
    private static void releaseResults(List<QueryDataBatch> results) {
        for (QueryDataBatch b : results) {
            b.release();
        }
    }

    @Test
    public void testDate() throws Exception {
        try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
                Drillbit bit = new Drillbit(CONFIG, serviceSet);
                DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
            // run query.
            bit.run();
            client.connect();
            List<QueryDataBatch> results = runPhysicalQuery(client,
                    "/record/vector/test_date.json", "/test_simple_date.json");
            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
            QueryDataBatch batch = results.get(0);
            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
            for (VectorWrapper<?> v : batchLoader) {
                ValueVector.Accessor accessor = v.getValueVector().getAccessor();
                // assertEquals(expected, actual): the original reversed the
                // arguments, which only garbles the failure message.
                assertEquals("1970-01-02", accessor.getObject(0).toString());
                assertEquals("2008-12-28", accessor.getObject(1).toString());
                assertEquals("2000-02-27", accessor.getObject(2).toString());
            }
            batchLoader.clear();
            releaseResults(results);
        }
    }

    @Test
    public void testSortDate() throws Exception {
        try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
                Drillbit bit = new Drillbit(CONFIG, serviceSet);
                DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
            // run query.
            bit.run();
            client.connect();
            List<QueryDataBatch> results = runPhysicalQuery(client,
                    "/record/vector/test_sort_date.json", "/test_simple_date.json");
            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
            // The sort emits its data in the second batch.
            QueryDataBatch batch = results.get(1);
            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
            for (VectorWrapper<?> v : batchLoader) {
                ValueVector.Accessor accessor = v.getValueVector().getAccessor();
                // Plain literals instead of the redundant new String(...).
                assertEquals("1970-01-02", accessor.getObject(0).toString());
                assertEquals("2000-02-27", accessor.getObject(1).toString());
                assertEquals("2008-12-28", accessor.getObject(2).toString());
            }
            batchLoader.clear();
            releaseResults(results);
        }
    }

    @Test
    public void testTimeStamp() throws Exception {
        try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
                Drillbit bit = new Drillbit(CONFIG, serviceSet);
                DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
            // run query.
            bit.run();
            client.connect();
            List<QueryDataBatch> results = runPhysicalQuery(client,
                    "/record/vector/test_timestamp.json", "/test_simple_date.json");
            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
            QueryDataBatch batch = results.get(0);
            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
            for (VectorWrapper<?> v : batchLoader) {
                ValueVector.Accessor accessor = v.getValueVector().getAccessor();
                assertEquals("1970-01-02 10:20:33.000", accessor.getObject(0).toString());
                assertEquals("2008-12-28 11:34:00.129", accessor.getObject(1).toString());
                assertEquals("2000-02-27 14:24:00.000", accessor.getObject(2).toString());
            }
            batchLoader.clear();
            releaseResults(results);
        }
    }

    @Test
    public void testInterval() throws Exception {
        try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
                Drillbit bit = new Drillbit(CONFIG, serviceSet);
                DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
            // run query.
            bit.run();
            client.connect();
            List<QueryDataBatch> results = runPhysicalQuery(client,
                    "/record/vector/test_interval.json", "/test_simple_interval.json");
            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
            QueryDataBatch batch = results.get(0);
            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
            Iterator<VectorWrapper<?>> itr = batchLoader.iterator();
            ValueVector.Accessor accessor = itr.next().getValueVector().getAccessor();
            // Check the interval type
            assertEquals("2 years 2 months 1 day 1:20:35.0", accessor.getObject(0).toString());
            assertEquals("2 years 2 months 0 days 0:0:0.0", accessor.getObject(1).toString());
            assertEquals("0 years 0 months 0 days 1:20:35.0", accessor.getObject(2).toString());
            assertEquals("2 years 2 months 1 day 1:20:35.897", accessor.getObject(3).toString());
            assertEquals("0 years 0 months 0 days 0:0:35.4", accessor.getObject(4).toString());
            assertEquals("1 year 10 months 1 day 0:-39:-25.0", accessor.getObject(5).toString());
            accessor = itr.next().getValueVector().getAccessor();
            // Check the interval year type
            assertEquals("2 years 2 months ", accessor.getObject(0).toString());
            assertEquals("2 years 2 months ", accessor.getObject(1).toString());
            assertEquals("0 years 0 months ", accessor.getObject(2).toString());
            assertEquals("2 years 2 months ", accessor.getObject(3).toString());
            assertEquals("0 years 0 months ", accessor.getObject(4).toString());
            assertEquals("1 year 10 months ", accessor.getObject(5).toString());
            accessor = itr.next().getValueVector().getAccessor();
            // Check the interval day type
            assertEquals("1 day 1:20:35.0", accessor.getObject(0).toString());
            assertEquals("0 days 0:0:0.0", accessor.getObject(1).toString());
            assertEquals("0 days 1:20:35.0", accessor.getObject(2).toString());
            assertEquals("1 day 1:20:35.897", accessor.getObject(3).toString());
            assertEquals("0 days 0:0:35.4", accessor.getObject(4).toString());
            assertEquals("1 day 0:-39:-25.0", accessor.getObject(5).toString());
            batchLoader.clear();
            releaseResults(results);
        }
    }

    @Test
    public void testLiterals() throws Exception {
        try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
                Drillbit bit = new Drillbit(CONFIG, serviceSet);
                DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
            // run query.
            bit.run();
            client.connect();
            List<QueryDataBatch> results = runPhysicalQuery(client,
                    "/record/vector/test_all_date_literals.json", "/test_simple_date.json");
            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
            QueryDataBatch batch = results.get(0);
            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
            // One expected literal per vector, in column order.
            String[] expected = {"2008-02-27",
                                 "2008-02-27 01:02:03.000",
                                 "10:11:13.999",
                                 "2 years 2 months 3 days 0:1:3.89"};
            int idx = 0;
            for (VectorWrapper<?> v : batchLoader) {
                ValueVector.Accessor accessor = v.getValueVector().getAccessor();
                assertEquals(expected[idx++], accessor.getObject(0).toString());
            }
            batchLoader.clear();
            releaseResults(results);
        }
    }

    @Test
    public void testDateAdd() throws Exception {
        try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
                Drillbit bit = new Drillbit(CONFIG, serviceSet);
                DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
            // run query.
            bit.run();
            client.connect();
            List<QueryDataBatch> results = runPhysicalQuery(client,
                    "/record/vector/test_date_add.json", "/test_simple_date.json");
            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
            QueryDataBatch batch = results.get(0);
            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
            for (VectorWrapper<?> v : batchLoader) {
                ValueVector.Accessor accessor = v.getValueVector().getAccessor();
                assertEquals("2008-03-27 00:00:00.000", accessor.getObject(0).toString());
            }
            batchLoader.clear();
            releaseResults(results);
        }
    }
}
| |
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2017 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.spider.filters;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.BDDMockito.given;
import static org.mockito.Matchers.anyString;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.httpclient.URI;
import org.apache.log4j.Logger;
import org.apache.log4j.varia.NullAppender;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.zaproxy.zap.model.Context;
import org.zaproxy.zap.spider.DomainAlwaysInScopeMatcher;
import org.zaproxy.zap.spider.filters.FetchFilter.FetchStatus;
/**
 * Unit test for {@link DefaultFetchFilter}.
 *
 * <p>Covers the filter's decision order: protocol check, then scope / always-in-scope
 * domains / scan context, then user exclusion regexes.
 */
@RunWith(MockitoJUnitRunner.class)
public class DefaultFetchFilterUnitTest {

    /** Mocked scan context, stubbed per test via {@link #contextInScope(boolean)}. */
    @Mock
    Context context;

    private DefaultFetchFilter filter;

    /** Keeps log4j output out of the test run. */
    @BeforeClass
    public static void suppressLogging() {
        Logger.getRootLogger().addAppender(new NullAppender());
    }

    /** Creates a fresh filter so tests do not share scope/exclusion state. */
    @Before
    public void setUp() {
        filter = new DefaultFetchFilter();
    }

    @Test
    public void shouldFilterUriWithNonSchemeAsIllegalProtocol() throws Exception {
        // Given
        URI uri = createUri("example.com");
        // When
        FetchStatus status = filter.checkFilter(uri);
        // Then
        assertThat(status, is(equalTo(FetchStatus.ILLEGAL_PROTOCOL)));
    }

    @Test
    public void shouldFilterUriWithNonHttpOrHttpsSchemeAsIllegalProtocol() throws Exception {
        // Given
        URI uri = createUri("ftp://example.com");
        // When
        FetchStatus status = filter.checkFilter(uri);
        // Then
        assertThat(status, is(equalTo(FetchStatus.ILLEGAL_PROTOCOL)));
    }

    @Test
    public void shouldFilterUriWithHttpSchemeAsOutOfScopeByDefault() throws Exception {
        // Given
        URI uri = createUri("http://example.com");
        // When
        FetchStatus status = filter.checkFilter(uri);
        // Then
        assertThat(status, is(equalTo(FetchStatus.OUT_OF_SCOPE)));
    }

    @Test
    public void shouldFilterUriWithHttpsSchemeAsOutOfScopeByDefault() throws Exception {
        // Given
        URI uri = createUri("https://example.com");
        // When
        FetchStatus status = filter.checkFilter(uri);
        // Then
        assertThat(status, is(equalTo(FetchStatus.OUT_OF_SCOPE)));
    }

    @Test
    public void shouldFilterOutOfScopeUriAsOutOfScope() throws Exception {
        // Given
        filter.addScopeRegex("scope.example.com");
        URI uri = createUri("http://example.com");
        // When
        FetchStatus status = filter.checkFilter(uri);
        // Then
        assertThat(status, is(equalTo(FetchStatus.OUT_OF_SCOPE)));
    }

    @Test
    public void shouldFilterInScopeUriAsValid() throws Exception {
        // Given
        filter.addScopeRegex("example.com");
        URI uri = createUri("http://example.com");
        // When
        FetchStatus status = filter.checkFilter(uri);
        // Then
        assertThat(status, is(equalTo(FetchStatus.VALID)));
    }

    @Test
    public void shouldFilterNonAlwaysInScopeUriAsOutOfScope() throws Exception {
        // Given
        filter.setDomainsAlwaysInScope(domainsAlwaysInScope("scope.example.com"));
        URI uri = createUri("https://example.com");
        // When
        FetchStatus status = filter.checkFilter(uri);
        // Then
        assertThat(status, is(equalTo(FetchStatus.OUT_OF_SCOPE)));
    }

    @Test
    public void shouldFilterAlwaysInScopeUriAsValid() throws Exception {
        // Given
        filter.setDomainsAlwaysInScope(domainsAlwaysInScope("example.com"));
        URI uri = createUri("https://example.com");
        // When
        FetchStatus status = filter.checkFilter(uri);
        // Then
        assertThat(status, is(equalTo(FetchStatus.VALID)));
    }

    @Test
    public void shouldFilterExcludedInScopeUriAsUserRules() throws Exception {
        // Given
        filter.addScopeRegex("example.com");
        filter.setExcludeRegexes(excludeRegexes(".*example\\.com.*"));
        URI uri = createUri("http://example.com");
        // When
        FetchStatus status = filter.checkFilter(uri);
        // Then
        assertThat(status, is(equalTo(FetchStatus.USER_RULES)));
    }

    @Test
    public void shouldFilterExcludedAlwaysInScopeUriAsUserRules() throws Exception {
        // Given
        filter.setDomainsAlwaysInScope(domainsAlwaysInScope("example.com"));
        filter.setExcludeRegexes(excludeRegexes(".*example\\.com.*"));
        URI uri = createUri("http://example.com");
        // When
        FetchStatus status = filter.checkFilter(uri);
        // Then
        assertThat(status, is(equalTo(FetchStatus.USER_RULES)));
    }

    @Test
    public void shouldFilterNonExcludedInScopeUriAsValid() throws Exception {
        // Given
        filter.addScopeRegex("example.com");
        filter.setExcludeRegexes(excludeRegexes("subdomain\\.example\\.com.*"));
        URI uri = createUri("http://example.com");
        // When
        FetchStatus status = filter.checkFilter(uri);
        // Then
        assertThat(status, is(equalTo(FetchStatus.VALID)));
    }

    @Test
    public void shouldFilterNonExcludedAlwaysInScopeUriAsValid() throws Exception {
        // Given
        filter.setDomainsAlwaysInScope(domainsAlwaysInScope("example.com"));
        filter.setExcludeRegexes(excludeRegexes("subdomain\\.example\\.com.*"));
        URI uri = createUri("http://example.com");
        // When
        FetchStatus status = filter.checkFilter(uri);
        // Then
        assertThat(status, is(equalTo(FetchStatus.VALID)));
    }

    @Test
    public void shouldFilterOutOfContextUriAsOutOfContext() throws Exception {
        // Given
        filter.setScanContext(contextInScope(false));
        URI uri = createUri("http://example.com");
        // When
        FetchStatus status = filter.checkFilter(uri);
        // Then
        assertThat(status, is(equalTo(FetchStatus.OUT_OF_CONTEXT)));
    }

    @Test
    public void shouldFilterInContextUriAsValid() throws Exception {
        // Given
        filter.setScanContext(contextInScope(true));
        URI uri = createUri("http://example.com");
        // When
        FetchStatus status = filter.checkFilter(uri);
        // Then
        assertThat(status, is(equalTo(FetchStatus.VALID)));
    }

    @Test
    public void shouldFilterExcludedInContextUriAsUserRules() throws Exception {
        // Given
        filter.setScanContext(contextInScope(true));
        filter.setExcludeRegexes(excludeRegexes(".*example\\.com.*"));
        URI uri = createUri("http://example.com");
        // When
        FetchStatus status = filter.checkFilter(uri);
        // Then
        assertThat(status, is(equalTo(FetchStatus.USER_RULES)));
    }

    @Test
    public void shouldFilterNonExcludedInContextUriAsValid() throws Exception {
        // Given
        filter.setScanContext(contextInScope(true));
        filter.setExcludeRegexes(excludeRegexes("subdomain\\.example\\.com.*"));
        URI uri = createUri("http://example.com");
        // When
        FetchStatus status = filter.checkFilter(uri);
        // Then
        assertThat(status, is(equalTo(FetchStatus.VALID)));
    }

    /**
     * Creates an escaped {@code URI}, rethrowing any parse failure unchecked so
     * tests can build URIs inline.
     */
    private static URI createUri(String uri) {
        try {
            return new URI(uri, true);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Builds one matcher per given domain; {@code null}/empty input yields an
     * empty list.
     */
    private List<DomainAlwaysInScopeMatcher> domainsAlwaysInScope(String... domains) {
        if (domains == null || domains.length == 0) {
            return Collections.emptyList();
        }
        // Presize to the actual number of domains (was a hard-coded capacity of 1).
        List<DomainAlwaysInScopeMatcher> domainsAlwaysInScope = new ArrayList<>(domains.length);
        for (String domain : domains) {
            domainsAlwaysInScope.add(new DomainAlwaysInScopeMatcher(domain));
        }
        return domainsAlwaysInScope;
    }

    /**
     * Copies the given regexes into a mutable list; {@code null}/empty input
     * yields an empty list.
     */
    private List<String> excludeRegexes(String... regexes) {
        if (regexes == null || regexes.length == 0) {
            return Collections.emptyList();
        }
        // Presize to the actual number of regexes and bulk-copy instead of a manual loop.
        List<String> excludedRegexes = new ArrayList<>(regexes.length);
        Collections.addAll(excludedRegexes, regexes);
        return excludedRegexes;
    }

    /** Stubs the mocked context to report the given in-context state for any URI. */
    private Context contextInScope(boolean inScope) {
        given(context.isInContext(anyString())).willReturn(inScope);
        return context;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.test.integration.functions.binary.matrix_full_cellwise;
import java.util.HashMap;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.sysml.api.DMLScript;
import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
import org.apache.sysml.lops.LopProperties.ExecType;
import org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex;
import org.apache.sysml.test.integration.AutomatedTestBase;
import org.apache.sysml.test.integration.TestConfiguration;
import org.apache.sysml.test.utils.TestUtils;
/**
 * Integration test for full matrix-matrix cellwise operations (+, -, *, /)
 * across all combinations of input sparsity (dense/sparse/empty) and
 * execution backend (CP, MR, SPARK). Each test delegates to
 * {@code runMatrixCellwiseOperationTest}, which runs the DML script and
 * compares against R (or an analytically computed result for division).
 */
public class FullMatrixMatrixCellwiseOperationTest extends AutomatedTestBase
{
	// Test names must match the DML/R script file names under TEST_DIR.
	// NOTE(review): "Substraction" is misspelled but intentionally kept — it
	// must match the existing script file name on disk.
	private final static String TEST_NAME1 = "FullMatrixCellwiseOperation_Addition";
	private final static String TEST_NAME2 = "FullMatrixCellwiseOperation_Substraction";
	private final static String TEST_NAME3 = "FullMatrixCellwiseOperation_Multiplication";
	private final static String TEST_NAME4 = "FullMatrixCellwiseOperation_Division";
	private final static String TEST_DIR = "functions/binary/matrix_full_cellwise/";
	private final static String TEST_CLASS_DIR = TEST_DIR + FullMatrixMatrixCellwiseOperationTest.class.getSimpleName() + "/";
	// numerical tolerance when comparing DML results against R results
	private final static double eps = 1e-10;
	// input matrix dimensions and the sparsities used for dense vs. sparse inputs
	private final static int rows = 1100;
	private final static int cols = 900;
	private final static double sparsity1 = 0.7;
	private final static double sparsity2 = 0.1;
	// cellwise operation under test
	private enum OpType{
		ADDITION,
		SUBTRACTION,
		MULTIPLICATION,
		DIVISION
	}
	// sparsity category of an input matrix
	private enum SparsityType{
		DENSE,
		SPARSE,
		EMPTY
	}
	@Override
	public void setUp()
	{
		// One configuration per operation; each produces a single output "C".
		addTestConfiguration(TEST_NAME1,new TestConfiguration(TEST_CLASS_DIR, TEST_NAME1,new String[]{"C"}));
		addTestConfiguration(TEST_NAME2,new TestConfiguration(TEST_CLASS_DIR, TEST_NAME2,new String[]{"C"}));
		addTestConfiguration(TEST_NAME3,new TestConfiguration(TEST_CLASS_DIR, TEST_NAME3,new String[]{"C"}));
		addTestConfiguration(TEST_NAME4,new TestConfiguration(TEST_CLASS_DIR, TEST_NAME4,new String[]{"C"}));
		if (TEST_CACHE_ENABLED) {
			// Keep outputs around so cached expected results can be reused.
			setOutAndExpectedDeletionDisabled(true);
		}
	}
	@BeforeClass
	public static void init()
	{
		// Start from a clean test-data directory for this class.
		TestUtils.clearDirectory(TEST_DATA_DIR + TEST_CLASS_DIR);
	}
	@AfterClass
	public static void cleanUp()
	{
		if (TEST_CACHE_ENABLED) {
			TestUtils.clearDirectory(TEST_DATA_DIR + TEST_CLASS_DIR);
		}
	}
	// ---------------- SPARK backend tests ----------------
	@Test
	public void testAdditionDenseDenseSP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.DENSE, SparsityType.DENSE, ExecType.SPARK);
	}
	@Test
	public void testAdditionDenseSparseSP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.DENSE, SparsityType.SPARSE, ExecType.SPARK);
	}
	@Test
	public void testAdditionDenseEmptySP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.DENSE, SparsityType.EMPTY, ExecType.SPARK);
	}
	@Test
	public void testAdditionSparseDenseSP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.SPARSE, SparsityType.DENSE, ExecType.SPARK);
	}
	@Test
	public void testAdditionSparseSparseSP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.SPARSE, SparsityType.SPARSE, ExecType.SPARK);
	}
	@Test
	public void testAdditionSparseEmptySP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.SPARSE, SparsityType.EMPTY, ExecType.SPARK);
	}
	@Test
	public void testAdditionEmptyDenseSP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.EMPTY, SparsityType.DENSE, ExecType.SPARK);
	}
	@Test
	public void testAdditionEmptySparseSP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.EMPTY, SparsityType.SPARSE, ExecType.SPARK);
	}
	@Test
	public void testAdditionEmptyEmptySP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.EMPTY, SparsityType.EMPTY, ExecType.SPARK);
	}
	@Test
	public void testSubtractionDenseDenseSP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.DENSE, SparsityType.DENSE, ExecType.SPARK);
	}
	@Test
	public void testSubtractionDenseSparseSP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.DENSE, SparsityType.SPARSE, ExecType.SPARK);
	}
	@Test
	public void testSubtractionDenseEmptySP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.DENSE, SparsityType.EMPTY, ExecType.SPARK);
	}
	@Test
	public void testSubtractionSparseDenseSP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.SPARSE, SparsityType.DENSE, ExecType.SPARK);
	}
	@Test
	public void testSubtractionSparseSparseSP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.SPARSE, SparsityType.SPARSE, ExecType.SPARK);
	}
	@Test
	public void testSubtractionSparseEmptySP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.SPARSE, SparsityType.EMPTY, ExecType.SPARK);
	}
	@Test
	public void testSubtractionEmptyDenseSP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.EMPTY, SparsityType.DENSE, ExecType.SPARK);
	}
	@Test
	public void testSubtractionEmptySparseSP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.EMPTY, SparsityType.SPARSE, ExecType.SPARK);
	}
	@Test
	public void testSubtractionEmptyEmptySP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.EMPTY, SparsityType.EMPTY, ExecType.SPARK);
	}
	@Test
	public void testMultiplicationDenseDenseSP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.DENSE, SparsityType.DENSE, ExecType.SPARK);
	}
	@Test
	public void testMultiplicationDenseSparseSP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.DENSE, SparsityType.SPARSE, ExecType.SPARK);
	}
	@Test
	public void testMultiplicationDenseEmptySP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.DENSE, SparsityType.EMPTY, ExecType.SPARK);
	}
	@Test
	public void testMultiplicationSparseDenseSP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.SPARSE, SparsityType.DENSE, ExecType.SPARK);
	}
	@Test
	public void testMultiplicationSparseSparseSP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.SPARSE, SparsityType.SPARSE, ExecType.SPARK);
	}
	@Test
	public void testMultiplicationSparseEmptySP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.SPARSE, SparsityType.EMPTY, ExecType.SPARK);
	}
	@Test
	public void testMultiplicationEmptyDenseSP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.EMPTY, SparsityType.DENSE, ExecType.SPARK);
	}
	@Test
	public void testMultiplicationEmptySparseSP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.EMPTY, SparsityType.SPARSE, ExecType.SPARK);
	}
	@Test
	public void testMultiplicationEmptyEmptySP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.EMPTY, SparsityType.EMPTY, ExecType.SPARK);
	}
	@Test
	public void testDivisionDenseDenseSP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.DENSE, SparsityType.DENSE, ExecType.SPARK);
	}
	@Test
	public void testDivisionDenseSparseSP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.DENSE, SparsityType.SPARSE, ExecType.SPARK);
	}
	@Test
	public void testDivisionDenseEmptySP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.DENSE, SparsityType.EMPTY, ExecType.SPARK);
	}
	@Test
	public void testDivisionSparseDenseSP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.SPARSE, SparsityType.DENSE, ExecType.SPARK);
	}
	@Test
	public void testDivisionSparseSparseSP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.SPARSE, SparsityType.SPARSE, ExecType.SPARK);
	}
	@Test
	public void testDivisionSparseEmptySP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.SPARSE, SparsityType.EMPTY, ExecType.SPARK);
	}
	@Test
	public void testDivisionEmptyDenseSP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.EMPTY, SparsityType.DENSE, ExecType.SPARK);
	}
	@Test
	public void testDivisionEmptySparseSP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.EMPTY, SparsityType.SPARSE, ExecType.SPARK);
	}
	@Test
	public void testDivisionEmptyEmptySP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.EMPTY, SparsityType.EMPTY, ExecType.SPARK);
	}
	// ---------------- CP and MR backend tests ----------------
	@Test
	public void testAdditionDenseDenseCP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.DENSE, SparsityType.DENSE, ExecType.CP);
	}
	@Test
	public void testAdditionDenseSparseCP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.DENSE, SparsityType.SPARSE, ExecType.CP);
	}
	@Test
	public void testAdditionDenseEmptyCP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.DENSE, SparsityType.EMPTY, ExecType.CP);
	}
	@Test
	public void testAdditionSparseDenseCP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.SPARSE, SparsityType.DENSE, ExecType.CP);
	}
	@Test
	public void testAdditionSparseSparseCP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.SPARSE, SparsityType.SPARSE, ExecType.CP);
	}
	@Test
	public void testAdditionSparseEmptyCP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.SPARSE, SparsityType.EMPTY, ExecType.CP);
	}
	@Test
	public void testAdditionEmptyDenseCP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.EMPTY, SparsityType.DENSE, ExecType.CP);
	}
	@Test
	public void testAdditionEmptySparseCP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.EMPTY, SparsityType.SPARSE, ExecType.CP);
	}
	@Test
	public void testAdditionEmptyEmptyCP()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.EMPTY, SparsityType.EMPTY, ExecType.CP);
	}
	@Test
	public void testAdditionDenseDenseMR()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.DENSE, SparsityType.DENSE, ExecType.MR);
	}
	@Test
	public void testAdditionDenseSparseMR()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.DENSE, SparsityType.SPARSE, ExecType.MR);
	}
	@Test
	public void testAdditionDenseEmptyMR()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.DENSE, SparsityType.EMPTY, ExecType.MR);
	}
	@Test
	public void testAdditionSparseDenseMR()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.SPARSE, SparsityType.DENSE, ExecType.MR);
	}
	@Test
	public void testAdditionSparseSparseMR()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.SPARSE, SparsityType.SPARSE, ExecType.MR);
	}
	@Test
	public void testAdditionSparseEmptyMR()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.SPARSE, SparsityType.EMPTY, ExecType.MR);
	}
	@Test
	public void testAdditionEmptyDenseMR()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.EMPTY, SparsityType.DENSE, ExecType.MR);
	}
	@Test
	public void testAdditionEmptySparseMR()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.EMPTY, SparsityType.SPARSE, ExecType.MR);
	}
	@Test
	public void testAdditionEmptyEmptyMR()
	{
		runMatrixCellwiseOperationTest(OpType.ADDITION, SparsityType.EMPTY, SparsityType.EMPTY, ExecType.MR);
	}
	@Test
	public void testSubtractionDenseDenseCP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.DENSE, SparsityType.DENSE, ExecType.CP);
	}
	@Test
	public void testSubtractionDenseSparseCP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.DENSE, SparsityType.SPARSE, ExecType.CP);
	}
	@Test
	public void testSubtractionDenseEmptyCP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.DENSE, SparsityType.EMPTY, ExecType.CP);
	}
	@Test
	public void testSubtractionSparseDenseCP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.SPARSE, SparsityType.DENSE, ExecType.CP);
	}
	@Test
	public void testSubtractionSparseSparseCP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.SPARSE, SparsityType.SPARSE, ExecType.CP);
	}
	@Test
	public void testSubtractionSparseEmptyCP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.SPARSE, SparsityType.EMPTY, ExecType.CP);
	}
	@Test
	public void testSubtractionEmptyDenseCP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.EMPTY, SparsityType.DENSE, ExecType.CP);
	}
	@Test
	public void testSubtractionEmptySparseCP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.EMPTY, SparsityType.SPARSE, ExecType.CP);
	}
	@Test
	public void testSubtractionEmptyEmptyCP()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.EMPTY, SparsityType.EMPTY, ExecType.CP);
	}
	@Test
	public void testSubtractionDenseDenseMR()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.DENSE, SparsityType.DENSE, ExecType.MR);
	}
	@Test
	public void testSubtractionDenseSparseMR()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.DENSE, SparsityType.SPARSE, ExecType.MR);
	}
	@Test
	public void testSubtractionDenseEmptyMR()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.DENSE, SparsityType.EMPTY, ExecType.MR);
	}
	@Test
	public void testSubtractionSparseDenseMR()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.SPARSE, SparsityType.DENSE, ExecType.MR);
	}
	@Test
	public void testSubtractionSparseSparseMR()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.SPARSE, SparsityType.SPARSE, ExecType.MR);
	}
	@Test
	public void testSubtractionSparseEmptyMR()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.SPARSE, SparsityType.EMPTY, ExecType.MR);
	}
	@Test
	public void testSubtractionEmptyDenseMR()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.EMPTY, SparsityType.DENSE, ExecType.MR);
	}
	@Test
	public void testSubtractionEmptySparseMR()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.EMPTY, SparsityType.SPARSE, ExecType.MR);
	}
	@Test
	public void testSubtractionEmptyEmptyMR()
	{
		runMatrixCellwiseOperationTest(OpType.SUBTRACTION, SparsityType.EMPTY, SparsityType.EMPTY, ExecType.MR);
	}
	@Test
	public void testMultiplicationDenseDenseCP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.DENSE, SparsityType.DENSE, ExecType.CP);
	}
	@Test
	public void testMultiplicationDenseSparseCP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.DENSE, SparsityType.SPARSE, ExecType.CP);
	}
	@Test
	public void testMultiplicationDenseEmptyCP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.DENSE, SparsityType.EMPTY, ExecType.CP);
	}
	@Test
	public void testMultiplicationSparseDenseCP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.SPARSE, SparsityType.DENSE, ExecType.CP);
	}
	@Test
	public void testMultiplicationSparseSparseCP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.SPARSE, SparsityType.SPARSE, ExecType.CP);
	}
	@Test
	public void testMultiplicationSparseEmptyCP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.SPARSE, SparsityType.EMPTY, ExecType.CP);
	}
	@Test
	public void testMultiplicationEmptyDenseCP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.EMPTY, SparsityType.DENSE, ExecType.CP);
	}
	@Test
	public void testMultiplicationEmptySparseCP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.EMPTY, SparsityType.SPARSE, ExecType.CP);
	}
	@Test
	public void testMultiplicationEmptyEmptyCP()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.EMPTY, SparsityType.EMPTY, ExecType.CP);
	}
	@Test
	public void testMultiplicationDenseDenseMR()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.DENSE, SparsityType.DENSE, ExecType.MR);
	}
	@Test
	public void testMultiplicationDenseSparseMR()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.DENSE, SparsityType.SPARSE, ExecType.MR);
	}
	@Test
	public void testMultiplicationDenseEmptyMR()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.DENSE, SparsityType.EMPTY, ExecType.MR);
	}
	@Test
	public void testMultiplicationSparseDenseMR()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.SPARSE, SparsityType.DENSE, ExecType.MR);
	}
	@Test
	public void testMultiplicationSparseSparseMR()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.SPARSE, SparsityType.SPARSE, ExecType.MR);
	}
	@Test
	public void testMultiplicationSparseEmptyMR()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.SPARSE, SparsityType.EMPTY, ExecType.MR);
	}
	@Test
	public void testMultiplicationEmptyDenseMR()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.EMPTY, SparsityType.DENSE, ExecType.MR);
	}
	@Test
	public void testMultiplicationEmptySparseMR()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.EMPTY, SparsityType.SPARSE, ExecType.MR);
	}
	@Test
	public void testMultiplicationEmptyEmptyMR()
	{
		runMatrixCellwiseOperationTest(OpType.MULTIPLICATION, SparsityType.EMPTY, SparsityType.EMPTY, ExecType.MR);
	}
	@Test
	public void testDivisionDenseDenseCP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.DENSE, SparsityType.DENSE, ExecType.CP);
	}
	@Test
	public void testDivisionDenseSparseCP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.DENSE, SparsityType.SPARSE, ExecType.CP);
	}
	@Test
	public void testDivisionDenseEmptyCP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.DENSE, SparsityType.EMPTY, ExecType.CP);
	}
	@Test
	public void testDivisionSparseDenseCP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.SPARSE, SparsityType.DENSE, ExecType.CP);
	}
	@Test
	public void testDivisionSparseSparseCP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.SPARSE, SparsityType.SPARSE, ExecType.CP);
	}
	@Test
	public void testDivisionSparseEmptyCP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.SPARSE, SparsityType.EMPTY, ExecType.CP);
	}
	@Test
	public void testDivisionEmptyDenseCP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.EMPTY, SparsityType.DENSE, ExecType.CP);
	}
	@Test
	public void testDivisionEmptySparseCP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.EMPTY, SparsityType.SPARSE, ExecType.CP);
	}
	@Test
	public void testDivisionEmptyEmptyCP()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.EMPTY, SparsityType.EMPTY, ExecType.CP);
	}
	@Test
	public void testDivisionDenseDenseMR()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.DENSE, SparsityType.DENSE, ExecType.MR);
	}
	@Test
	public void testDivisionDenseSparseMR()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.DENSE, SparsityType.SPARSE, ExecType.MR);
	}
	@Test
	public void testDivisionDenseEmptyMR()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.DENSE, SparsityType.EMPTY, ExecType.MR);
	}
	@Test
	public void testDivisionSparseDenseMR()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.SPARSE, SparsityType.DENSE, ExecType.MR);
	}
	@Test
	public void testDivisionSparseSparseMR()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.SPARSE, SparsityType.SPARSE, ExecType.MR);
	}
	@Test
	public void testDivisionSparseEmptyMR()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.SPARSE, SparsityType.EMPTY, ExecType.MR);
	}
	@Test
	public void testDivisionEmptyDenseMR()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.EMPTY, SparsityType.DENSE, ExecType.MR);
	}
	@Test
	public void testDivisionEmptySparseMR()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.EMPTY, SparsityType.SPARSE, ExecType.MR);
	}
	@Test
	public void testDivisionEmptyEmptyMR()
	{
		runMatrixCellwiseOperationTest(OpType.DIVISION, SparsityType.EMPTY, SparsityType.EMPTY, ExecType.MR);
	}
	/**
	 * Runs one DML script for the given cellwise operation, compares the result
	 * against R (or, for division, against a directly computed expected matrix),
	 * and restores the global runtime platform afterwards.
	 *
	 * @param type cellwise operation to test
	 * @param sparseM1 sparsity category of the left input matrix A
	 * @param sparseM2 sparsity category of the right input matrix B
	 * @param instType execution backend (CP, MR, or SPARK)
	 */
	private void runMatrixCellwiseOperationTest( OpType type, SparsityType sparseM1, SparsityType sparseM2, ExecType instType)
	{
		//rtplatform for MR
		RUNTIME_PLATFORM platformOld = rtplatform;
		switch( instType ){
			case MR: rtplatform = RUNTIME_PLATFORM.HADOOP; break;
			case SPARK: rtplatform = RUNTIME_PLATFORM.SPARK; break;
			default: rtplatform = RUNTIME_PLATFORM.HYBRID; break;
		}
		boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
		if( rtplatform == RUNTIME_PLATFORM.SPARK )
			DMLScript.USE_LOCAL_SPARK_CONFIG = true;
		try
		{
			// map operation to the corresponding script name
			String TEST_NAME = null;
			switch( type )
			{
				case ADDITION: TEST_NAME = TEST_NAME1; break;
				case SUBTRACTION: TEST_NAME = TEST_NAME2; break;
				case MULTIPLICATION: TEST_NAME = TEST_NAME3; break;
				case DIVISION: TEST_NAME = TEST_NAME4; break;
			}
			TestConfiguration config = getTestConfiguration(TEST_NAME);
			//get sparsity
			double lsparsity1 = 1.0, lsparsity2 = 1.0;
			switch( sparseM1 ){
				case DENSE: lsparsity1 = sparsity1; break;
				case SPARSE: lsparsity1 = sparsity2; break;
				case EMPTY: lsparsity1 = 0.0; break;
			}
			switch( sparseM2 ){
				case DENSE: lsparsity2 = sparsity1; break;
				case SPARSE: lsparsity2 = sparsity2; break;
				case EMPTY: lsparsity2 = 0.0; break;
			}
			// cache key excludes division because its expected results are
			// computed directly rather than via R (see below)
			String TEST_CACHE_DIR = "";
			if (TEST_CACHE_ENABLED && (type != OpType.DIVISION))
			{
				TEST_CACHE_DIR = type.ordinal() + "_" + lsparsity1 + "_" + lsparsity2 + "/";
			}
			loadTestConfiguration(config, TEST_CACHE_DIR);
			/* This is for running the junit test the new way, i.e., construct the arguments directly */
			String HOME = SCRIPT_DIR + TEST_DIR;
			fullDMLScriptName = HOME + TEST_NAME + ".dml";
			programArgs = new String[]{"-explain", "-args",
				input("A"), Integer.toString(rows), Integer.toString(cols),
				input("B"), Integer.toString(rows), Integer.toString(cols),
				output("C") };
			fullRScriptName = HOME + TEST_NAME + ".R";
			rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();
			//generate actual dataset
			double[][] A = getRandomMatrix(rows, cols, 0, (lsparsity1==0)?0:1, lsparsity1, 7);
			writeInputMatrix("A", A, true);
			double[][] B = getRandomMatrix(rows, cols, 0, (lsparsity2==0)?0:1, lsparsity2, 3);
			writeInputMatrix("B", B, true);
			boolean exceptionExpected = false;
			runTest(true, exceptionExpected, null, -1);
			if( !(type==OpType.DIVISION) )
			{
				runRScript(true);
				//compare matrices
				HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("C");
				HashMap<CellIndex, Double> rfile = readRMatrixFromFS("C");
				TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");
			}
			else
			{
				//For division, IEEE 754 defines x/0.0 as INFINITY and 0.0/0.0 as NaN.
				//Java handles this correctly while R always returns 1.0E308 in those cases.
				//Hence, we directly write the expected results.
				double C[][] = new double[rows][cols];
				for( int i=0; i<rows; i++ )
					for( int j=0; j<cols; j++ )
						C[i][j] = A[i][j]/B[i][j];
				writeExpectedMatrix("C", C);
				compareResults();
			}
		}
		finally
		{
			// always restore the global platform/config, even on test failure
			rtplatform = platformOld;
			DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
		}
	}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.spi.core.remoting;
import javax.transaction.xa.XAException;
import javax.transaction.xa.Xid;
import java.util.HashMap;
import java.util.concurrent.Executor;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.Message;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.client.ClientConsumer;
import org.apache.activemq.artemis.api.core.client.ClientSession;
import org.apache.activemq.artemis.api.core.client.SendAcknowledgementHandler;
import org.apache.activemq.artemis.core.client.impl.ClientConsumerInternal;
import org.apache.activemq.artemis.core.client.impl.ClientLargeMessageInternal;
import org.apache.activemq.artemis.core.client.impl.ClientMessageInternal;
import org.apache.activemq.artemis.core.client.impl.ClientProducerCreditsImpl;
import org.apache.activemq.artemis.core.client.impl.ClientSessionInternal;
import org.apache.activemq.artemis.core.message.impl.MessageInternal;
import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection;
import org.apache.activemq.artemis.utils.IDGenerator;
import org.apache.activemq.artemis.utils.SimpleIDGenerator;
/**
 * Protocol-agnostic bridge between a client session and the remoting layer.
 * <p>
 * Concrete subclasses translate the abstract operations declared here (queue
 * management, message sending, transactions, XA, failover support, ...) into
 * packets for the specific wire protocol in use.
 */
public abstract class SessionContext {

   protected ClientSessionInternal session;

   protected SendAcknowledgementHandler sendAckHandler;

   protected volatile RemotingConnection remotingConnection;

   protected final IDGenerator idGenerator = new SimpleIDGenerator(0);

   public SessionContext(RemotingConnection remotingConnection) {
      this.remotingConnection = remotingConnection;
   }

   public ClientSessionInternal getSession() {
      return session;
   }

   public void setSession(ClientSessionInternal session) {
      this.session = session;
   }

   /**
    * Either reattaches or reconnects on the new connection, preferring to reattach.
    *
    * @param newConnection the connection that replaces the failed one
    * @return true if it was possible to reattach
    * @throws ActiveMQException
    */
   public abstract boolean reattachOnNewConnection(RemotingConnection newConnection) throws ActiveMQException;

   public RemotingConnection getRemotingConnection() {
      return remotingConnection;
   }

   public abstract void closeConsumer(ClientConsumer consumer) throws ActiveMQException;

   public abstract void sendConsumerCredits(ClientConsumer consumer, int credits);

   public abstract boolean supportsLargeMessage();

   protected void handleReceiveLargeMessage(ConsumerContext consumerID, ClientLargeMessageInternal clientLargeMessage, long largeMessageSize) throws Exception {
      // Read the field once so a concurrent setSession(null) cannot cause an NPE
      // between the check and the call.
      ClientSessionInternal current = this.session;
      if (current != null) {
         current.handleReceiveLargeMessage(consumerID, clientLargeMessage, largeMessageSize);
      }
   }

   protected void handleReceiveMessage(ConsumerContext consumerID, final ClientMessageInternal message) throws Exception {
      // Single read of the field; see handleReceiveLargeMessage.
      ClientSessionInternal current = this.session;
      if (current != null) {
         current.handleReceiveMessage(consumerID, message);
      }
   }

   protected void handleReceiveContinuation(final ConsumerContext consumerID, byte[] chunk, int flowControlSize, boolean isContinues) throws Exception {
      ClientSessionInternal current = this.session;
      if (current != null) {
         current.handleReceiveContinuation(consumerID, chunk, flowControlSize, isContinues);
      }
   }

   protected void handleReceiveProducerCredits(SimpleString address, int credits) {
      ClientSessionInternal current = this.session;
      if (current != null) {
         current.handleReceiveProducerCredits(address, credits);
      }
   }

   protected void handleReceiveProducerFailCredits(SimpleString address, int credits) {
      ClientSessionInternal current = this.session;
      if (current != null) {
         current.handleReceiveProducerFailCredits(address, credits);
      }
   }

   public abstract int getCreditsOnSendingFull(MessageInternal msgI);

   public abstract void sendFullMessage(MessageInternal msgI, boolean sendBlocking, SendAcknowledgementHandler handler, SimpleString defaultAddress) throws ActiveMQException;

   /**
    * Sends the first chunk of a large message.
    *
    * @param msgI the message being sent
    * @return the number of credits (or bytes) used to send this packet
    * @throws ActiveMQException
    */
   public abstract int sendInitialChunkOnLargeMessage(MessageInternal msgI) throws ActiveMQException;

   public abstract int sendLargeMessageChunk(MessageInternal msgI, long messageBodySize, boolean sendBlocking, boolean lastChunk, byte[] chunk, SendAcknowledgementHandler messageHandler) throws ActiveMQException;

   public abstract void setSendAcknowledgementHandler(final SendAcknowledgementHandler handler);

   public abstract void createSharedQueue(SimpleString address,
                                          SimpleString queueName,
                                          SimpleString filterString,
                                          boolean durable) throws ActiveMQException;

   public abstract void deleteQueue(SimpleString queueName) throws ActiveMQException;

   public abstract void createQueue(SimpleString address, SimpleString queueName, SimpleString filterString, boolean durable, boolean temp) throws ActiveMQException;

   public abstract ClientSession.QueueQuery queueQuery(SimpleString queueName) throws ActiveMQException;

   public abstract void forceDelivery(ClientConsumer consumer, long sequence) throws ActiveMQException;

   public abstract ClientSession.AddressQuery addressQuery(final SimpleString address) throws ActiveMQException;

   public abstract void simpleCommit() throws ActiveMQException;

   /**
    * If we are doing a simple rollback on the RA, we need to ack the last message sent to the
    * consumer, otherwise DLQ won't work.
    * <p/>
    * This is because we only ACK after on the RA; we may review this if we always acked earlier.
    *
    * @param lastMessageAsDelivered whether to ack the last delivered message
    * @throws ActiveMQException
    */
   public abstract void simpleRollback(boolean lastMessageAsDelivered) throws ActiveMQException;

   public abstract void sessionStart() throws ActiveMQException;

   public abstract void sessionStop() throws ActiveMQException;

   public abstract void sendACK(boolean individual, boolean block, final ClientConsumer consumer, final Message message) throws ActiveMQException;

   public abstract void expireMessage(final ClientConsumer consumer, Message message) throws ActiveMQException;

   public abstract void sessionClose() throws ActiveMQException;

   public abstract void addSessionMetadata(String key, String data) throws ActiveMQException;

   public abstract void addUniqueMetaData(String key, String data) throws ActiveMQException;

   public abstract void sendProducerCreditsMessage(final int credits, final SimpleString address);

   public abstract void xaCommit(Xid xid, boolean onePhase) throws XAException, ActiveMQException;

   public abstract void xaEnd(Xid xid, int flags) throws XAException, ActiveMQException;

   public abstract void xaForget(Xid xid) throws XAException, ActiveMQException;

   public abstract int xaPrepare(Xid xid) throws XAException, ActiveMQException;

   public abstract Xid[] xaScan() throws ActiveMQException;

   public abstract void xaRollback(Xid xid, boolean wasStarted) throws ActiveMQException, XAException;

   public abstract void xaStart(Xid xid, int flags) throws XAException, ActiveMQException;

   public abstract boolean configureTransactionTimeout(int seconds) throws ActiveMQException;

   public abstract ClientConsumerInternal createConsumer(SimpleString queueName, SimpleString filterString, int windowSize, int maxRate, int ackBatchSize, boolean browseOnly,
                                                         Executor executor, Executor flowControlExecutor) throws ActiveMQException;

   /**
    * Performs a round trip to the server requesting what is the current tx timeout on the session.
    *
    * @return the transaction timeout, in seconds
    */
   public abstract int recoverSessionTimeout() throws ActiveMQException;

   public abstract int getServerVersion();

   public abstract void recreateSession(final String username,
                                        final String password,
                                        final int minLargeMessageSize,
                                        final boolean xa,
                                        final boolean autoCommitSends,
                                        final boolean autoCommitAcks,
                                        final boolean preAcknowledge,
                                        final SimpleString defaultAddress) throws ActiveMQException;

   public abstract void recreateConsumerOnServer(ClientConsumerInternal consumerInternal) throws ActiveMQException;

   public abstract void xaFailed(Xid xid) throws ActiveMQException;

   public abstract void restartSession() throws ActiveMQException;

   public abstract void resetMetadata(HashMap<String, String> metaDataToSend);

   // Failover utility methods

   /**
    * Interrupt and return any blocked calls.
    */
   public abstract void returnBlocking(ActiveMQException cause);

   /**
    * Locks the communication channel of the session, preventing any traffic while failover is
    * happening. Called on preFailover from ClientSessionImpl.
    */
   public abstract void lockCommunications();

   public abstract void releaseCommunications();

   public abstract void cleanup();

   public abstract void linkFlowControl(SimpleString address, ClientProducerCreditsImpl clientProducerCredits);
}
| |
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.json;
import com.facebook.buck.util.Ansi;
import com.facebook.buck.util.InputStreamConsumer;
import com.facebook.buck.util.ProjectFilesystem;
import com.facebook.buck.util.Threads;
import com.facebook.buck.util.environment.Platform;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
* Delegates to buck.py for parsing of buck build files. Constructed on demand for the
* parsing phase and must be closed afterward to free up resources.
*/
/**
 * Delegates to buck.py for parsing of buck build files. Constructed on demand for the
 * parsing phase and must be closed afterward to free up resources.
 */
public class ProjectBuildFileParser implements AutoCloseable {

  /** Path to the buck.py script that is used to evaluate a build file. */
  private static final String PATH_TO_BUCK_PY = System.getProperty("buck.path_to_buck_py",
      "src/com/facebook/buck/parser/buck.py");

  private Process buckPyProcess;
  private BuildFileToJsonParser buckPyStdoutParser;
  private BufferedWriter buckPyStdinWriter;

  private final File projectRoot;
  private final ImmutableSet<String> ignorePaths;
  private final ImmutableList<String> commonIncludes;
  private final String pythonInterpreter;

  private boolean isServerMode;

  private boolean isInitialized;
  private boolean isClosed;

  public ProjectBuildFileParser(
      ProjectFilesystem projectFilesystem,
      Iterable<String> commonIncludes,
      String pythonInterpreter) {
    this.projectRoot = projectFilesystem.getProjectRoot();
    this.ignorePaths = projectFilesystem.getIgnorePaths();
    this.commonIncludes = ImmutableList.copyOf(commonIncludes);
    this.pythonInterpreter = Preconditions.checkNotNull(pythonInterpreter);

    // Default to server mode unless explicitly unset internally.
    setServerMode(true);
  }

  /**
   * Sets whether buck.py will use --server mode. Server mode communicates via
   * stdin/stdout to accept new BUCK files to parse in a long running fashion. It
   * also changes the stdout format so that output has an extra layer of structure
   * sufficient to communicate state and coordinate on individual BUCK files
   * submitted.
   * <p>
   * Note that you must not invoke this method after initialization.
   */
  private void setServerMode(boolean isServerMode) {
    ensureNotClosed();
    ensureNotInitialized();

    this.isServerMode = isServerMode;
  }

  private void ensureNotClosed() {
    Preconditions.checkState(!isClosed);
  }

  private void ensureNotInitialized() {
    Preconditions.checkState(!isInitialized);
  }

  /**
   * Initialization on demand moves around the performance impact of creating the Jython
   * interpreter to when parsing actually begins. This makes it easier to attribute this time
   * to the actual parse phase.
   */
  private void initIfNeeded() throws IOException {
    ensureNotClosed();
    if (!isInitialized) {
      init();
      isInitialized = true;
    }
  }

  /**
   * Initialize the parser, starting buck.py.
   */
  private void init() throws IOException {
    ProcessBuilder processBuilder = new ProcessBuilder(buildArgs());
    buckPyProcess = processBuilder.start();

    OutputStream stdin = buckPyProcess.getOutputStream();
    InputStream stderr = buckPyProcess.getErrorStream();

    Thread stderrConsumer = Threads.namedThread(
        ProjectBuildFileParser.class.getSimpleName(),
        new InputStreamConsumer(stderr,
            System.err,
            new Ansi(Platform.detect())));
    stderrConsumer.start();

    // Use UTF-8 explicitly so both directions of the pipe agree on the encoding;
    // the reader below is UTF-8, so relying on the platform default here would
    // break on non-UTF-8 platforms.
    buckPyStdinWriter = new BufferedWriter(new OutputStreamWriter(stdin, Charsets.UTF_8));

    // TODO(mbolin): Ensure that the Reader gets closed.
    Reader reader = new InputStreamReader(buckPyProcess.getInputStream(), Charsets.UTF_8);
    buckPyStdoutParser = new BuildFileToJsonParser(reader, isServerMode);
  }

  /** Builds the command line used to invoke buck.py. */
  private ImmutableList<String> buildArgs() {
    // Invoking buck.py and read JSON-formatted build rules from its stdout.
    ImmutableList.Builder<String> argBuilder = ImmutableList.builder();
    argBuilder.add(pythonInterpreter);

    // Ask python to unbuffer stdout so that we can coordinate based on the output as it is
    // produced.
    argBuilder.add("-u");

    argBuilder.add(PATH_TO_BUCK_PY);

    if (isServerMode) {
      // Provide BUCK files to parse via buck.py's stdin.
      argBuilder.add("--server");
    }

    argBuilder.add("--project_root", projectRoot.getAbsolutePath());

    // Add the --include flags.
    for (String include : commonIncludes) {
      argBuilder.add("--include");
      argBuilder.add(include);
    }

    for (String path : ignorePaths) {
      argBuilder.add("--ignore_path");
      argBuilder.add(path);
    }

    return argBuilder.build();
  }

  /**
   * Create, parse and destroy the parser in one step for an entire project. This should
   * only be used when the tree must be parsed without a specific target to be built or
   * otherwise operated upon.
   */
  public static List<Map<String, Object>> getAllRulesInProject(
      ProjectBuildFileParserFactory factory,
      Iterable<String> includes)
      throws BuildFileParseException {
    try (ProjectBuildFileParser buildFileParser = factory.createParser(includes)) {
      buildFileParser.setServerMode(false);
      return buildFileParser.getAllRulesInternal(Optional.<String>absent());
    } catch (IOException e) {
      throw BuildFileParseException.createForGenericBuildFileParseError(e);
    }
  }

  /**
   * Collect all rules from a particular build file.
   *
   * @param buildFile should be an absolute path to a build file. Must have rootPath as its prefix.
   */
  public List<Map<String, Object>> getAllRules(String buildFile)
      throws BuildFileParseException {
    List<Map<String, Object>> result = getAllRulesAndMetaRules(buildFile);

    // Strip out the __includes meta rule, which is the last rule.
    return Collections.unmodifiableList(result.subList(0, result.size() - 1));
  }

  /**
   * Collect all rules from a particular build file, along with meta rules about the rules, for
   * example which build files the rules depend on.
   *
   * @param buildFile should be an absolute path to a build file. Must have rootPath as its prefix.
   */
  public List<Map<String, Object>> getAllRulesAndMetaRules(String buildFile)
      throws BuildFileParseException {
    try {
      return getAllRulesInternal(Optional.of(buildFile));
    } catch (IOException e) {
      throw BuildFileParseException.createForBuildFileParseError(buildFile, e);
    }
  }

  @VisibleForTesting
  protected List<Map<String, Object>> getAllRulesInternal(Optional<String> buildFile)
      throws IOException {
    ensureNotClosed();
    initIfNeeded();

    // When in server mode, we require a build file. When not in server mode, we
    // cannot accept a build file. Pretty stupid, actually. Consider fixing this.
    Preconditions.checkState(buildFile.isPresent() == isServerMode);

    if (buildFile.isPresent()) {
      buckPyStdinWriter.write(buildFile.get());
      buckPyStdinWriter.newLine();
      buckPyStdinWriter.flush();
    }

    return buckPyStdoutParser.nextRules();
  }

  @Override
  @SuppressWarnings("PMD.EmptyCatchBlock")
  public void close() throws BuildFileParseException {
    if (isClosed) {
      return;
    }

    try {
      if (isInitialized) {
        if (isServerMode) {
          // Allow buck.py to terminate gracefully.
          try {
            buckPyStdinWriter.close();
          } catch (IOException e) {
            // Safe to ignore since we've already flushed everything we wanted
            // to write.
          }
        }

        try {
          int exitCode = buckPyProcess.waitFor();
          if (exitCode != 0) {
            // Previously this exception was created but never thrown, silently
            // swallowing parser failures; surface it to the caller.
            throw BuildFileParseException.createForUnknownParseError(
                String.format("Parser did not exit cleanly (exit code: %d)", exitCode));
          }
        } catch (InterruptedException e) {
          // Restore the interrupt status before propagating so callers can observe it.
          Thread.currentThread().interrupt();
          throw Throwables.propagate(e);
        }
      }
    } finally {
      isClosed = true;
    }
  }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*
*/
package com.microsoft.azure.management.apimanagement.v2019_01_01.implementation;
import com.microsoft.azure.arm.model.implementation.WrapperImpl;
import com.microsoft.azure.management.apimanagement.v2019_01_01.Tags;
import rx.Completable;
import rx.Observable;
import rx.functions.Func1;
import com.microsoft.azure.Page;
import com.microsoft.azure.management.apimanagement.v2019_01_01.TagContract;
/**
 * Implementation of the {@link Tags} API surface: wraps the auto-generated
 * {@link TagsInner} client and adapts inner model types to the fluent
 * {@link TagContract} model. All conversions route through {@link #wrapModel}
 * so the wrapping logic lives in exactly one place.
 */
class TagsImpl extends WrapperImpl<TagsInner> implements Tags {
    private final ApiManagementManager manager;

    TagsImpl(ApiManagementManager manager) {
        super(manager.inner().tags());
        this.manager = manager;
    }

    public ApiManagementManager manager() {
        return this.manager;
    }

    @Override
    public TagContractImpl define(String name) {
        return wrapModel(name);
    }

    /** Wraps an inner model in its fluent implementation type. */
    private TagContractImpl wrapModel(TagContractInner inner) {
        return new TagContractImpl(inner, manager());
    }

    /** Creates a new, not-yet-persisted fluent model with the given name. */
    private TagContractImpl wrapModel(String name) {
        return new TagContractImpl(name, this.manager());
    }

    @Override
    public Completable getEntityStateByOperationAsync(String resourceGroupName, String serviceName, String apiId, String operationId, String tagId) {
        TagsInner client = this.inner();
        return client.getEntityStateByOperationAsync(resourceGroupName, serviceName, apiId, operationId, tagId).toCompletable();
    }

    @Override
    public Observable<TagContract> listByApiAsync(final String resourceGroupName, final String serviceName, final String apiId) {
        TagsInner client = this.inner();
        return client.listByApiAsync(resourceGroupName, serviceName, apiId)
            .flatMapIterable(new Func1<Page<TagContractInner>, Iterable<TagContractInner>>() {
                @Override
                public Iterable<TagContractInner> call(Page<TagContractInner> page) {
                    return page.items();
                }
            })
            .map(new Func1<TagContractInner, TagContract>() {
                @Override
                public TagContract call(TagContractInner inner) {
                    // Route through wrapModel for consistency with the other list methods.
                    return wrapModel(inner);
                }
            });
    }

    @Override
    public Completable getEntityStateByApiAsync(String resourceGroupName, String serviceName, String apiId, String tagId) {
        TagsInner client = this.inner();
        return client.getEntityStateByApiAsync(resourceGroupName, serviceName, apiId, tagId).toCompletable();
    }

    @Override
    public Observable<TagContract> getByApiAsync(String resourceGroupName, String serviceName, String apiId, String tagId) {
        TagsInner client = this.inner();
        return client.getByApiAsync(resourceGroupName, serviceName, apiId, tagId)
            .map(new Func1<TagContractInner, TagContract>() {
                @Override
                public TagContract call(TagContractInner inner) {
                    return wrapModel(inner);
                }
            });
    }

    @Override
    public Observable<TagContract> assignToApiAsync(String resourceGroupName, String serviceName, String apiId, String tagId) {
        TagsInner client = this.inner();
        return client.assignToApiAsync(resourceGroupName, serviceName, apiId, tagId)
            .map(new Func1<TagContractInner, TagContract>() {
                @Override
                public TagContract call(TagContractInner inner) {
                    return wrapModel(inner);
                }
            });
    }

    @Override
    public Completable detachFromApiAsync(String resourceGroupName, String serviceName, String apiId, String tagId) {
        TagsInner client = this.inner();
        return client.detachFromApiAsync(resourceGroupName, serviceName, apiId, tagId).toCompletable();
    }

    @Override
    public Observable<TagContract> listByProductAsync(final String resourceGroupName, final String serviceName, final String productId) {
        TagsInner client = this.inner();
        return client.listByProductAsync(resourceGroupName, serviceName, productId)
            .flatMapIterable(new Func1<Page<TagContractInner>, Iterable<TagContractInner>>() {
                @Override
                public Iterable<TagContractInner> call(Page<TagContractInner> page) {
                    return page.items();
                }
            })
            .map(new Func1<TagContractInner, TagContract>() {
                @Override
                public TagContract call(TagContractInner inner) {
                    return wrapModel(inner);
                }
            });
    }

    @Override
    public Completable getEntityStateByProductAsync(String resourceGroupName, String serviceName, String productId, String tagId) {
        TagsInner client = this.inner();
        return client.getEntityStateByProductAsync(resourceGroupName, serviceName, productId, tagId).toCompletable();
    }

    @Override
    public Observable<TagContract> getByProductAsync(String resourceGroupName, String serviceName, String productId, String tagId) {
        TagsInner client = this.inner();
        return client.getByProductAsync(resourceGroupName, serviceName, productId, tagId)
            .map(new Func1<TagContractInner, TagContract>() {
                @Override
                public TagContract call(TagContractInner inner) {
                    return wrapModel(inner);
                }
            });
    }

    @Override
    public Observable<TagContract> assignToProductAsync(String resourceGroupName, String serviceName, String productId, String tagId) {
        TagsInner client = this.inner();
        return client.assignToProductAsync(resourceGroupName, serviceName, productId, tagId)
            .map(new Func1<TagContractInner, TagContract>() {
                @Override
                public TagContract call(TagContractInner inner) {
                    return wrapModel(inner);
                }
            });
    }

    @Override
    public Completable detachFromProductAsync(String resourceGroupName, String serviceName, String productId, String tagId) {
        TagsInner client = this.inner();
        return client.detachFromProductAsync(resourceGroupName, serviceName, productId, tagId).toCompletable();
    }

    @Override
    public Observable<TagContract> listByServiceAsync(final String resourceGroupName, final String serviceName) {
        TagsInner client = this.inner();
        return client.listByServiceAsync(resourceGroupName, serviceName)
            .flatMapIterable(new Func1<Page<TagContractInner>, Iterable<TagContractInner>>() {
                @Override
                public Iterable<TagContractInner> call(Page<TagContractInner> page) {
                    return page.items();
                }
            })
            .map(new Func1<TagContractInner, TagContract>() {
                @Override
                public TagContract call(TagContractInner inner) {
                    return wrapModel(inner);
                }
            });
    }

    @Override
    public Completable getEntityStateAsync(String resourceGroupName, String serviceName, String tagId) {
        TagsInner client = this.inner();
        return client.getEntityStateAsync(resourceGroupName, serviceName, tagId).toCompletable();
    }

    @Override
    public Observable<TagContract> getAsync(String resourceGroupName, String serviceName, String tagId) {
        TagsInner client = this.inner();
        return client.getAsync(resourceGroupName, serviceName, tagId)
            .map(new Func1<TagContractInner, TagContract>() {
                @Override
                public TagContract call(TagContractInner inner) {
                    return wrapModel(inner);
                }
            });
    }

    @Override
    public Observable<TagContract> createOrUpdateAsync(String resourceGroupName, String serviceName, String tagId, String displayName) {
        TagsInner client = this.inner();
        return client.createOrUpdateAsync(resourceGroupName, serviceName, tagId, displayName)
            .map(new Func1<TagContractInner, TagContract>() {
                @Override
                public TagContract call(TagContractInner inner) {
                    return wrapModel(inner);
                }
            });
    }

    @Override
    public Completable updateAsync(String resourceGroupName, String serviceName, String tagId, String ifMatch, String displayName) {
        TagsInner client = this.inner();
        return client.updateAsync(resourceGroupName, serviceName, tagId, ifMatch, displayName).toCompletable();
    }

    @Override
    public Completable deleteAsync(String resourceGroupName, String serviceName, String tagId, String ifMatch) {
        TagsInner client = this.inner();
        return client.deleteAsync(resourceGroupName, serviceName, tagId, ifMatch).toCompletable();
    }

    @Override
    public Observable<TagContract> listByOperationAsync(final String resourceGroupName, final String serviceName, final String apiId, final String operationId) {
        TagsInner client = this.inner();
        return client.listByOperationAsync(resourceGroupName, serviceName, apiId, operationId)
            .flatMapIterable(new Func1<Page<TagContractInner>, Iterable<TagContractInner>>() {
                @Override
                public Iterable<TagContractInner> call(Page<TagContractInner> page) {
                    return page.items();
                }
            })
            .map(new Func1<TagContractInner, TagContract>() {
                @Override
                public TagContract call(TagContractInner inner) {
                    return wrapModel(inner);
                }
            });
    }

    @Override
    public Observable<TagContract> getByOperationAsync(String resourceGroupName, String serviceName, String apiId, String operationId, String tagId) {
        TagsInner client = this.inner();
        return client.getByOperationAsync(resourceGroupName, serviceName, apiId, operationId, tagId)
            .flatMap(new Func1<TagContractInner, Observable<TagContract>>() {
                @Override
                public Observable<TagContract> call(TagContractInner inner) {
                    if (inner == null) {
                        // The service returned no body; emit an empty stream rather than null.
                        return Observable.empty();
                    } else {
                        return Observable.just((TagContract)wrapModel(inner));
                    }
                }
            });
    }

    @Override
    public Completable detachFromOperationAsync(String resourceGroupName, String serviceName, String apiId, String operationId, String tagId) {
        TagsInner client = this.inner();
        return client.detachFromOperationAsync(resourceGroupName, serviceName, apiId, operationId, tagId).toCompletable();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.server.log;
import static java.nio.charset.StandardCharsets.UTF_8;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.UUID;
import org.apache.accumulo.core.clientImpl.ClientContext;
import org.apache.accumulo.core.util.Pair;
import org.apache.accumulo.fate.zookeeper.ZooReaderWriter;
import org.apache.accumulo.fate.zookeeper.ZooUtil.NodeExistsPolicy;
import org.apache.accumulo.fate.zookeeper.ZooUtil.NodeMissingPolicy;
import org.apache.accumulo.server.ServerContext;
import org.apache.accumulo.server.master.state.TServerInstance;
import org.apache.hadoop.fs.Path;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class governs the space in Zookeeper that advertises the status of Write-Ahead Logs in use
* by tablet servers and the replication machinery.
*
* <p>
* The Master needs to know the state of the WALs to mark tablets during recovery. The GC needs to
* know when a log is no longer needed so it can be removed. The replication mechanism needs to know
* when a log is closed and can be forwarded to the destination table.
*
* <p>
* The state of the WALs is kept in Zookeeper under /accumulo/<instanceid>/wals. For each
* server, there is a znode formatted like the TServerInstance.toString(): "host:port[sessionid]".
* Under the server znode, is a node for each log, using the UUID for the log. In each of the WAL
* znodes, is the current state of the log, and the full path to the log.
*
* <p>
* The state [OPEN, CLOSED, UNREFERENCED] is what the tablet server believes to be the state of the
* file.
*
* <p>
* In the event of a recovery, the log is identified as belonging to a dead server. The master will
* update the tablets assigned to that server with log references. Once all tablets have been
* reassigned and the log references are removed, the log will be eligible for deletion.
*
* <p>
* Even when a log is UNREFERENCED by the tablet server, the replication mechanism may still need
* the log. The GC will defer log removal until replication is finished with it.
*/
public class WalStateManager {
public class WalMarkerException extends Exception {
private static final long serialVersionUID = 1L;
public WalMarkerException(Exception ex) {
super(ex);
}
}
private static final Logger log = LoggerFactory.getLogger(WalStateManager.class);
public static final String ZWALS = "/wals";
public static enum WalState {
/* log is open, and may be written to */
OPEN,
/* log is closed, and will not be written to again */
CLOSED,
/* unreferenced: no tablet needs the log for recovery */
UNREFERENCED
}
private final ClientContext context;
private final ZooReaderWriter zoo;
private volatile boolean checkedExistance = false;
public WalStateManager(ServerContext context) {
this.context = context;
this.zoo = context.getZooReaderWriter();
}
private String root() throws WalMarkerException {
String root = context.getZooKeeperRoot() + ZWALS;
try {
if (!checkedExistance && !zoo.exists(root)) {
zoo.putPersistentData(root, new byte[0], NodeExistsPolicy.SKIP);
}
checkedExistance = true;
} catch (KeeperException | InterruptedException e) {
throw new WalMarkerException(e);
}
return root;
}
// Tablet server exists
public void initWalMarker(TServerInstance tsi) throws WalMarkerException {
byte[] data = new byte[0];
try {
zoo.putPersistentData(root() + "/" + tsi, data, NodeExistsPolicy.FAIL);
} catch (KeeperException | InterruptedException e) {
throw new WalMarkerException(e);
}
}
// Tablet server opens a new WAL
public void addNewWalMarker(TServerInstance tsi, Path path) throws WalMarkerException {
updateState(tsi, path, WalState.OPEN);
}
private void updateState(TServerInstance tsi, Path path, WalState state)
throws WalMarkerException {
byte[] data = (state + "," + path).getBytes(UTF_8);
try {
NodeExistsPolicy policy = NodeExistsPolicy.OVERWRITE;
if (state == WalState.OPEN) {
policy = NodeExistsPolicy.FAIL;
}
log.debug("Setting {} to {}", path.getName(), state);
zoo.putPersistentData(root() + "/" + tsi + "/" + path.getName(), data, policy);
} catch (KeeperException | InterruptedException e) {
throw new WalMarkerException(e);
}
}
// Tablet server has no references to the WAL
public void walUnreferenced(TServerInstance tsi, Path path) throws WalMarkerException {
updateState(tsi, path, WalState.UNREFERENCED);
}
private static Pair<WalState,Path> parse(byte[] data) {
String[] parts = new String(data, UTF_8).split(",");
return new Pair<>(WalState.valueOf(parts[0]), new Path(parts[1]));
}
// Master needs to know the logs for the given instance
public List<Path> getWalsInUse(TServerInstance tsi) throws WalMarkerException {
List<Path> result = new ArrayList<>();
try {
String zpath = root() + "/" + tsi;
zoo.sync(zpath);
for (String child : zoo.getChildren(zpath)) {
byte[] zdata = null;
try {
// This function is called by the Master. Its possible that Accumulo GC deletes an
// unreferenced WAL in ZK after the call to getChildren above. Catch this exception inside
// the loop so that not all children are ignored.
zdata = zoo.getData(zpath + "/" + child, null);
} catch (KeeperException.NoNodeException e) {
log.debug("WAL state removed {} {} during getWalsInUse. Likely a race condition between "
+ "master and GC.", tsi, child);
}
if (zdata != null) {
Pair<WalState,Path> parts = parse(zdata);
if (parts.getFirst() != WalState.UNREFERENCED) {
result.add(parts.getSecond());
}
}
}
} catch (KeeperException.NoNodeException e) {
log.debug("{} has no wal entry in zookeeper, assuming no logs", tsi);
} catch (KeeperException | InterruptedException e) {
throw new WalMarkerException(e);
}
return result;
}
// garbage collector wants the list of logs markers for all servers
public Map<TServerInstance,List<UUID>> getAllMarkers() throws WalMarkerException {
Map<TServerInstance,List<UUID>> result = new HashMap<>();
try {
String path = root();
for (String child : zoo.getChildren(path)) {
TServerInstance inst = new TServerInstance(child);
List<UUID> logs = result.get(inst);
if (logs == null) {
result.put(inst, logs = new ArrayList<>());
}
// This function is called by the Accumulo GC which deletes WAL markers. Therefore we do not
// expect the following call to fail because the WAL info in ZK was deleted.
for (String idString : zoo.getChildren(path + "/" + child)) {
logs.add(UUID.fromString(idString));
}
}
} catch (KeeperException | InterruptedException e) {
throw new WalMarkerException(e);
}
return result;
}
// garbage collector wants to know the state (open/closed) of a log, and the filename to delete
public Pair<WalState,Path> state(TServerInstance instance, UUID uuid) throws WalMarkerException {
try {
String path = root() + "/" + instance + "/" + uuid;
return parse(zoo.getData(path, null));
} catch (KeeperException | InterruptedException e) {
throw new WalMarkerException(e);
}
}
// utility combination of getAllMarkers and state
public Map<Path,WalState> getAllState() throws WalMarkerException {
Map<Path,WalState> result = new HashMap<>();
for (Entry<TServerInstance,List<UUID>> entry : getAllMarkers().entrySet()) {
for (UUID id : entry.getValue()) {
// This function is called by the Accumulo GC which deletes WAL markers. Therefore we do not
// expect the following call to fail because the WAL info in ZK was deleted.
Pair<WalState,Path> state = state(entry.getKey(), id);
result.put(state.getSecond(), state.getFirst());
}
}
return result;
}
// garbage collector knows it's safe to remove the marker for a closed log
public void removeWalMarker(TServerInstance instance, UUID uuid) throws WalMarkerException {
try {
log.debug("Removing {}", uuid);
String path = root() + "/" + instance + "/" + uuid;
zoo.delete(path, -1);
} catch (InterruptedException | KeeperException e) {
throw new WalMarkerException(e);
}
}
// garbage collector knows the instance is dead, and has no markers
public void forget(TServerInstance instance) throws WalMarkerException {
String path = root() + "/" + instance;
try {
zoo.recursiveDelete(path, NodeMissingPolicy.FAIL);
} catch (InterruptedException | KeeperException e) {
throw new WalMarkerException(e);
}
}
  // tablet server can mark the log as closed (but still needed), for replication to begin
  // master can mark a log as unreferenced after it has made log recovery markers on the tablets
  // that need to be recovered
  public void closeWal(TServerInstance instance, Path path) throws WalMarkerException {
    // Thin wrapper: the ZK read-modify-write lives in updateState().
    updateState(instance, path, WalState.CLOSED);
  }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: grpc/meta_master.proto
package alluxio.grpc;
/**
* Protobuf type {@code alluxio.grpc.meta.GetMasterInfoPOptions}
*/
public final class GetMasterInfoPOptions extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:alluxio.grpc.meta.GetMasterInfoPOptions)
GetMasterInfoPOptionsOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetMasterInfoPOptions.newBuilder() to construct.
private GetMasterInfoPOptions(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetMasterInfoPOptions() {
filter_ = java.util.Collections.emptyList();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private GetMasterInfoPOptions(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
int rawValue = input.readEnum();
alluxio.grpc.MasterInfoField value = alluxio.grpc.MasterInfoField.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
filter_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00000001;
}
filter_.add(rawValue);
}
break;
}
case 10: {
int length = input.readRawVarint32();
int oldLimit = input.pushLimit(length);
while(input.getBytesUntilLimit() > 0) {
int rawValue = input.readEnum();
alluxio.grpc.MasterInfoField value = alluxio.grpc.MasterInfoField.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
filter_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00000001;
}
filter_.add(rawValue);
}
}
input.popLimit(oldLimit);
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
filter_ = java.util.Collections.unmodifiableList(filter_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return alluxio.grpc.MetaMasterProto.internal_static_alluxio_grpc_meta_GetMasterInfoPOptions_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return alluxio.grpc.MetaMasterProto.internal_static_alluxio_grpc_meta_GetMasterInfoPOptions_fieldAccessorTable
.ensureFieldAccessorsInitialized(
alluxio.grpc.GetMasterInfoPOptions.class, alluxio.grpc.GetMasterInfoPOptions.Builder.class);
}
public static final int FILTER_FIELD_NUMBER = 1;
private java.util.List<java.lang.Integer> filter_;
private static final com.google.protobuf.Internal.ListAdapter.Converter<
java.lang.Integer, alluxio.grpc.MasterInfoField> filter_converter_ =
new com.google.protobuf.Internal.ListAdapter.Converter<
java.lang.Integer, alluxio.grpc.MasterInfoField>() {
public alluxio.grpc.MasterInfoField convert(java.lang.Integer from) {
alluxio.grpc.MasterInfoField result = alluxio.grpc.MasterInfoField.valueOf(from);
return result == null ? alluxio.grpc.MasterInfoField.LEADER_MASTER_ADDRESS : result;
}
};
/**
* <code>repeated .alluxio.grpc.meta.MasterInfoField filter = 1;</code>
*/
public java.util.List<alluxio.grpc.MasterInfoField> getFilterList() {
return new com.google.protobuf.Internal.ListAdapter<
java.lang.Integer, alluxio.grpc.MasterInfoField>(filter_, filter_converter_);
}
/**
* <code>repeated .alluxio.grpc.meta.MasterInfoField filter = 1;</code>
*/
public int getFilterCount() {
return filter_.size();
}
/**
* <code>repeated .alluxio.grpc.meta.MasterInfoField filter = 1;</code>
*/
public alluxio.grpc.MasterInfoField getFilter(int index) {
return filter_converter_.convert(filter_.get(index));
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < filter_.size(); i++) {
output.writeEnum(1, filter_.get(i));
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < filter_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeEnumSizeNoTag(filter_.get(i));
}
size += dataSize;
size += 1 * filter_.size();
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof alluxio.grpc.GetMasterInfoPOptions)) {
return super.equals(obj);
}
alluxio.grpc.GetMasterInfoPOptions other = (alluxio.grpc.GetMasterInfoPOptions) obj;
boolean result = true;
result = result && filter_.equals(other.filter_);
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getFilterCount() > 0) {
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + filter_.hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static alluxio.grpc.GetMasterInfoPOptions parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static alluxio.grpc.GetMasterInfoPOptions parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static alluxio.grpc.GetMasterInfoPOptions parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static alluxio.grpc.GetMasterInfoPOptions parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static alluxio.grpc.GetMasterInfoPOptions parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static alluxio.grpc.GetMasterInfoPOptions parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static alluxio.grpc.GetMasterInfoPOptions parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static alluxio.grpc.GetMasterInfoPOptions parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static alluxio.grpc.GetMasterInfoPOptions parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static alluxio.grpc.GetMasterInfoPOptions parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static alluxio.grpc.GetMasterInfoPOptions parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static alluxio.grpc.GetMasterInfoPOptions parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(alluxio.grpc.GetMasterInfoPOptions prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code alluxio.grpc.meta.GetMasterInfoPOptions}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:alluxio.grpc.meta.GetMasterInfoPOptions)
alluxio.grpc.GetMasterInfoPOptionsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return alluxio.grpc.MetaMasterProto.internal_static_alluxio_grpc_meta_GetMasterInfoPOptions_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return alluxio.grpc.MetaMasterProto.internal_static_alluxio_grpc_meta_GetMasterInfoPOptions_fieldAccessorTable
.ensureFieldAccessorsInitialized(
alluxio.grpc.GetMasterInfoPOptions.class, alluxio.grpc.GetMasterInfoPOptions.Builder.class);
}
// Construct using alluxio.grpc.GetMasterInfoPOptions.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
filter_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return alluxio.grpc.MetaMasterProto.internal_static_alluxio_grpc_meta_GetMasterInfoPOptions_descriptor;
}
public alluxio.grpc.GetMasterInfoPOptions getDefaultInstanceForType() {
return alluxio.grpc.GetMasterInfoPOptions.getDefaultInstance();
}
public alluxio.grpc.GetMasterInfoPOptions build() {
alluxio.grpc.GetMasterInfoPOptions result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public alluxio.grpc.GetMasterInfoPOptions buildPartial() {
alluxio.grpc.GetMasterInfoPOptions result = new alluxio.grpc.GetMasterInfoPOptions(this);
int from_bitField0_ = bitField0_;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
filter_ = java.util.Collections.unmodifiableList(filter_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.filter_ = filter_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof alluxio.grpc.GetMasterInfoPOptions) {
return mergeFrom((alluxio.grpc.GetMasterInfoPOptions)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(alluxio.grpc.GetMasterInfoPOptions other) {
if (other == alluxio.grpc.GetMasterInfoPOptions.getDefaultInstance()) return this;
if (!other.filter_.isEmpty()) {
if (filter_.isEmpty()) {
filter_ = other.filter_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureFilterIsMutable();
filter_.addAll(other.filter_);
}
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
alluxio.grpc.GetMasterInfoPOptions parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (alluxio.grpc.GetMasterInfoPOptions) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.util.List<java.lang.Integer> filter_ =
java.util.Collections.emptyList();
private void ensureFilterIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
filter_ = new java.util.ArrayList<java.lang.Integer>(filter_);
bitField0_ |= 0x00000001;
}
}
/**
* <code>repeated .alluxio.grpc.meta.MasterInfoField filter = 1;</code>
*/
public java.util.List<alluxio.grpc.MasterInfoField> getFilterList() {
return new com.google.protobuf.Internal.ListAdapter<
java.lang.Integer, alluxio.grpc.MasterInfoField>(filter_, filter_converter_);
}
/**
* <code>repeated .alluxio.grpc.meta.MasterInfoField filter = 1;</code>
*/
public int getFilterCount() {
return filter_.size();
}
/**
* <code>repeated .alluxio.grpc.meta.MasterInfoField filter = 1;</code>
*/
public alluxio.grpc.MasterInfoField getFilter(int index) {
return filter_converter_.convert(filter_.get(index));
}
/**
* <code>repeated .alluxio.grpc.meta.MasterInfoField filter = 1;</code>
*/
public Builder setFilter(
int index, alluxio.grpc.MasterInfoField value) {
if (value == null) {
throw new NullPointerException();
}
ensureFilterIsMutable();
filter_.set(index, value.getNumber());
onChanged();
return this;
}
/**
* <code>repeated .alluxio.grpc.meta.MasterInfoField filter = 1;</code>
*/
public Builder addFilter(alluxio.grpc.MasterInfoField value) {
if (value == null) {
throw new NullPointerException();
}
ensureFilterIsMutable();
filter_.add(value.getNumber());
onChanged();
return this;
}
/**
* <code>repeated .alluxio.grpc.meta.MasterInfoField filter = 1;</code>
*/
public Builder addAllFilter(
java.lang.Iterable<? extends alluxio.grpc.MasterInfoField> values) {
ensureFilterIsMutable();
for (alluxio.grpc.MasterInfoField value : values) {
filter_.add(value.getNumber());
}
onChanged();
return this;
}
/**
* <code>repeated .alluxio.grpc.meta.MasterInfoField filter = 1;</code>
*/
public Builder clearFilter() {
filter_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:alluxio.grpc.meta.GetMasterInfoPOptions)
}
// @@protoc_insertion_point(class_scope:alluxio.grpc.meta.GetMasterInfoPOptions)
private static final alluxio.grpc.GetMasterInfoPOptions DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new alluxio.grpc.GetMasterInfoPOptions();
}
public static alluxio.grpc.GetMasterInfoPOptions getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final com.google.protobuf.Parser<GetMasterInfoPOptions>
PARSER = new com.google.protobuf.AbstractParser<GetMasterInfoPOptions>() {
public GetMasterInfoPOptions parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new GetMasterInfoPOptions(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<GetMasterInfoPOptions> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<GetMasterInfoPOptions> getParserForType() {
return PARSER;
}
public alluxio.grpc.GetMasterInfoPOptions getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.channel;
import io.netty.buffer.ByteBufAllocator;
import io.netty.util.DefaultAttributeMap;
import io.netty.util.ReferenceCountUtil;
import io.netty.util.internal.EmptyArrays;
import io.netty.util.internal.OneTimeTask;
import io.netty.util.internal.PlatformDependent;
import io.netty.util.internal.ThreadLocalRandom;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import java.io.IOException;
import java.net.ConnectException;
import java.net.InetSocketAddress;
import java.net.NoRouteToHostException;
import java.net.SocketAddress;
import java.net.SocketException;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.NotYetConnectedException;
import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;
/**
* A skeletal {@link Channel} implementation.
*/
public abstract class AbstractChannel extends DefaultAttributeMap implements Channel {
    private static final InternalLogger logger = InternalLoggerFactory.getInstance(AbstractChannel.class);
    // Shared, pre-allocated exceptions used on hot paths; their stack traces are blanked
    // out below, so they carry no per-call-site information.
    static final ClosedChannelException CLOSED_CHANNEL_EXCEPTION = new ClosedChannelException();
    static final NotYetConnectedException NOT_YET_CONNECTED_EXCEPTION = new NotYetConnectedException();
    static {
        CLOSED_CHANNEL_EXCEPTION.setStackTrace(EmptyArrays.EMPTY_STACK_TRACE);
        NOT_YET_CONNECTED_EXCEPTION.setStackTrace(EmptyArrays.EMPTY_STACK_TRACE);
    }
    // Lazily created by estimatorHandle().
    private MessageSizeEstimator.Handle estimatorHandle;
    private final Channel parent;
    // Random per-channel id; hashCode() and compareTo() are derived from it.
    private final long hashCode = ThreadLocalRandom.current().nextLong();
    private final Unsafe unsafe;
    private final DefaultChannelPipeline pipeline;
    private final ChannelFuture succeededFuture = new SucceededChannelFuture(this, null);
    private final VoidChannelPromise voidPromise = new VoidChannelPromise(this, true);
    private final VoidChannelPromise unsafeVoidPromise = new VoidChannelPromise(this, false);
    private final CloseFuture closeFuture = new CloseFuture(this);
    // Cached addresses; cleared by invalidateLocalAddress()/invalidateRemoteAddress().
    private volatile SocketAddress localAddress;
    private volatile SocketAddress remoteAddress;
    private volatile EventLoop eventLoop;
    private volatile boolean registered;
    /** Cache for the string representation of this channel */
    private boolean strValActive;
    private String strVal;
    /**
     * Creates a new instance.
     *
     * @param parent
     *        the parent of this channel. {@code null} if there's no parent.
     */
    protected AbstractChannel(Channel parent) {
        this.parent = parent;
        unsafe = newUnsafe();
        // Every channel owns exactly one pipeline, created here and bound to this channel.
        pipeline = new DefaultChannelPipeline(this);
    }
@Override
public boolean isWritable() {
ChannelOutboundBuffer buf = unsafe.outboundBuffer();
return buf != null && buf.isWritable();
}
    @Override
    public Channel parent() {
        return parent;
    }
    @Override
    public ChannelPipeline pipeline() {
        return pipeline;
    }
    @Override
    public ByteBufAllocator alloc() {
        // The allocator is configured per channel via the ChannelConfig.
        return config().getAllocator();
    }
@Override
public EventLoop eventLoop() {
EventLoop eventLoop = this.eventLoop;
if (eventLoop == null) {
throw new IllegalStateException("channel not registered to an event loop");
}
return eventLoop;
}
    @Override
    public SocketAddress localAddress() {
        // Lazily cached; invalidateLocalAddress() clears the cache when it may have changed.
        SocketAddress localAddress = this.localAddress;
        if (localAddress == null) {
            try {
                this.localAddress = localAddress = unsafe().localAddress();
            } catch (Throwable t) {
                // Sometimes fails on a closed socket in Windows.
                return null;
            }
        }
        return localAddress;
    }
    protected void invalidateLocalAddress() {
        // Drop the cached value; the next localAddress() call re-reads it from the transport.
        localAddress = null;
    }
    @Override
    public SocketAddress remoteAddress() {
        // Lazily cached; mirrors localAddress() above.
        SocketAddress remoteAddress = this.remoteAddress;
        if (remoteAddress == null) {
            try {
                this.remoteAddress = remoteAddress = unsafe().remoteAddress();
            } catch (Throwable t) {
                // Sometimes fails on a closed socket in Windows.
                return null;
            }
        }
        return remoteAddress;
    }
    /**
     * Reset the stored remoteAddress
     */
    protected void invalidateRemoteAddress() {
        remoteAddress = null;
    }
    @Override
    public boolean isRegistered() {
        return registered;
    }
    // All of the outbound operations below simply delegate to the pipeline; the actual
    // transport work happens in the handlers and, ultimately, in this channel's Unsafe.
    @Override
    public ChannelFuture bind(SocketAddress localAddress) {
        return pipeline.bind(localAddress);
    }
    @Override
    public ChannelFuture connect(SocketAddress remoteAddress) {
        return pipeline.connect(remoteAddress);
    }
    @Override
    public ChannelFuture connect(SocketAddress remoteAddress, SocketAddress localAddress) {
        return pipeline.connect(remoteAddress, localAddress);
    }
    @Override
    public ChannelFuture disconnect() {
        return pipeline.disconnect();
    }
    @Override
    public ChannelFuture close() {
        return pipeline.close();
    }
    @Override
    public ChannelFuture deregister() {
        return pipeline.deregister();
    }
    @Override
    public Channel flush() {
        pipeline.flush();
        return this;
    }
    @Override
    public ChannelFuture bind(SocketAddress localAddress, ChannelPromise promise) {
        return pipeline.bind(localAddress, promise);
    }
    @Override
    public ChannelFuture connect(SocketAddress remoteAddress, ChannelPromise promise) {
        return pipeline.connect(remoteAddress, promise);
    }
    @Override
    public ChannelFuture connect(SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) {
        return pipeline.connect(remoteAddress, localAddress, promise);
    }
    @Override
    public ChannelFuture disconnect(ChannelPromise promise) {
        return pipeline.disconnect(promise);
    }
    @Override
    public ChannelFuture close(ChannelPromise promise) {
        return pipeline.close(promise);
    }
    @Override
    public ChannelFuture deregister(ChannelPromise promise) {
        return pipeline.deregister(promise);
    }
    @Override
    public Channel read() {
        pipeline.read();
        return this;
    }
    @Override
    public ChannelFuture write(Object msg) {
        return pipeline.write(msg);
    }
    @Override
    public ChannelFuture write(Object msg, ChannelPromise promise) {
        return pipeline.write(msg, promise);
    }
    @Override
    public ChannelFuture writeAndFlush(Object msg) {
        return pipeline.writeAndFlush(msg);
    }
    @Override
    public ChannelFuture writeAndFlush(Object msg, ChannelPromise promise) {
        return pipeline.writeAndFlush(msg, promise);
    }
    @Override
    public ChannelPromise newPromise() {
        return new DefaultChannelPromise(this);
    }
    @Override
    public ChannelProgressivePromise newProgressivePromise() {
        return new DefaultChannelProgressivePromise(this);
    }
    @Override
    public ChannelFuture newSucceededFuture() {
        // Shared instance: success futures carry no per-operation state.
        return succeededFuture;
    }
    @Override
    public ChannelFuture newFailedFuture(Throwable cause) {
        return new FailedChannelFuture(this, null, cause);
    }
    @Override
    public ChannelFuture closeFuture() {
        return closeFuture;
    }
    @Override
    public Unsafe unsafe() {
        return unsafe;
    }
    /**
     * Create a new {@link AbstractUnsafe} instance which will be used for the life-time of the {@link Channel}
     */
    protected abstract AbstractUnsafe newUnsafe();
    /**
     * Returns the ID of this channel.
     */
    @Override
    public final int hashCode() {
        // Low 32 bits of the random per-channel id; stable for the channel's lifetime.
        return (int) hashCode;
    }
    /**
     * Returns {@code true} if and only if the specified object is identical
     * with this channel (i.e: {@code this == o}).
     */
    @Override
    public final boolean equals(Object o) {
        return this == o;
    }
@Override
public final int compareTo(Channel o) {
if (this == o) {
return 0;
}
long ret = hashCode - o.hashCode();
if (ret > 0) {
return 1;
}
if (ret < 0) {
return -1;
}
ret = System.identityHashCode(this) - System.identityHashCode(o);
if (ret != 0) {
return (int) ret;
}
// Jackpot! - different objects with same hashes
throw new Error();
}
/**
* Returns the {@link String} representation of this channel. The returned
* string contains the {@linkplain #hashCode()} ID}, {@linkplain #localAddress() local address},
* and {@linkplain #remoteAddress() remote address} of this channel for
* easier identification.
*/
@Override
public String toString() {
boolean active = isActive();
if (strValActive == active && strVal != null) {
return strVal;
}
SocketAddress remoteAddr = remoteAddress();
SocketAddress localAddr = localAddress();
if (remoteAddr != null) {
SocketAddress srcAddr;
SocketAddress dstAddr;
if (parent == null) {
srcAddr = localAddr;
dstAddr = remoteAddr;
} else {
srcAddr = remoteAddr;
dstAddr = localAddr;
}
strVal = String.format("[id: 0x%08x, %s %s %s]", (int) hashCode, srcAddr, active? "=>" : ":>", dstAddr);
} else if (localAddr != null) {
strVal = String.format("[id: 0x%08x, %s]", (int) hashCode, localAddr);
} else {
strVal = String.format("[id: 0x%08x]", (int) hashCode);
}
strValActive = active;
return strVal;
}
    @Override
    public final ChannelPromise voidPromise() {
        // Shared promise instance created with fireException=true in the field initializer.
        // NOTE(review): its exact notification semantics live in VoidChannelPromise — confirm there.
        return voidPromise;
    }
    final MessageSizeEstimator.Handle estimatorHandle() {
        // Lazy init without synchronization — presumably only ever called from the
        // channel's event loop; TODO confirm.
        if (estimatorHandle == null) {
            estimatorHandle = config().getMessageSizeEstimator().newHandle();
        }
        return estimatorHandle;
    }
/**
* {@link Unsafe} implementation which sub-classes must extend and use.
*/
protected abstract class AbstractUnsafe implements Unsafe {
        // Set to null once close() starts; a null buffer means "channel is closing/closed".
        private ChannelOutboundBuffer outboundBuffer = new ChannelOutboundBuffer(AbstractChannel.this);
        // NOTE(review): set while a flush is running (not visible in this chunk); close()
        // uses it to defer the channelInactive event — confirm against flush0().
        private boolean inFlush0;
        /** true if the channel has never been registered, false otherwise */
        private boolean neverRegistered = true;
        @Override
        public final ChannelOutboundBuffer outboundBuffer() {
            return outboundBuffer;
        }
        @Override
        public final SocketAddress localAddress() {
            return localAddress0();
        }
        @Override
        public final SocketAddress remoteAddress() {
            return remoteAddress0();
        }
        // Validates the event loop, stores it, and runs the actual registration on the
        // event-loop thread. If the loop rejects the task the channel is force-closed so
        // the underlying FD is not leaked.
        @Override
        public final void register(EventLoop eventLoop, final ChannelPromise promise) {
            if (eventLoop == null) {
                throw new NullPointerException("eventLoop");
            }
            if (isRegistered()) {
                promise.setFailure(new IllegalStateException("registered to an event loop already"));
                return;
            }
            if (!isCompatible(eventLoop)) {
                promise.setFailure(
                        new IllegalStateException("incompatible event loop type: " + eventLoop.getClass().getName()));
                return;
            }
            AbstractChannel.this.eventLoop = eventLoop;
            // register0 must execute on the event loop; jump there if we are not on it already.
            if (eventLoop.inEventLoop()) {
                register0(promise);
            } else {
                try {
                    eventLoop.execute(new OneTimeTask() {
                        @Override
                        public void run() {
                            register0(promise);
                        }
                    });
                } catch (Throwable t) {
                    logger.warn(
                            "Force-closing a channel whose registration task was not accepted by an event loop: {}",
                            AbstractChannel.this, t);
                    closeForcibly();
                    closeFuture.setClosed();
                    safeSetFailure(promise, t);
                }
            }
        }
        // Runs on the event loop: performs the transport registration, flips the
        // registered flags and fires channelRegistered (and channelActive on first
        // registration of an already-active channel).
        private void register0(ChannelPromise promise) {
            try {
                // check if the channel is still open as it could be closed in the mean time when the register
                // call was outside of the eventLoop
                if (!promise.setUncancellable() || !ensureOpen(promise)) {
                    return;
                }
                boolean firstRegistration = neverRegistered;
                doRegister();
                neverRegistered = false;
                registered = true;
                safeSetSuccess(promise);
                pipeline.fireChannelRegistered();
                // Only fire a channelActive if the channel has never been registered. This prevents firing
                // multiple channel actives if the channel is deregistered and re-registered.
                if (firstRegistration && isActive()) {
                    pipeline.fireChannelActive();
                }
            } catch (Throwable t) {
                // Close the channel directly to avoid FD leak.
                closeForcibly();
                closeFuture.setClosed();
                safeSetFailure(promise, t);
            }
        }
        // Binds the underlying transport to localAddress; fires channelActive (via the
        // event loop) if the bind made the channel active, and fails the promise on error.
        @Override
        public final void bind(final SocketAddress localAddress, final ChannelPromise promise) {
            if (!promise.setUncancellable() || !ensureOpen(promise)) {
                return;
            }
            // See: https://github.com/netty/netty/issues/576
            if (Boolean.TRUE.equals(config().getOption(ChannelOption.SO_BROADCAST)) &&
                localAddress instanceof InetSocketAddress &&
                !((InetSocketAddress) localAddress).getAddress().isAnyLocalAddress() &&
                !PlatformDependent.isWindows() && !PlatformDependent.isRoot()) {
                // Warn a user about the fact that a non-root user can't receive a
                // broadcast packet on *nix if the socket is bound on non-wildcard address.
                logger.warn(
                        "A non-root user can't receive a broadcast packet if the socket " +
                        "is not bound to a wildcard address; binding to a non-wildcard " +
                        "address (" + localAddress + ") anyway as requested.");
            }
            boolean wasActive = isActive();
            try {
                doBind(localAddress);
            } catch (Throwable t) {
                safeSetFailure(promise, t);
                closeIfClosed();
                return;
            }
            if (!wasActive && isActive()) {
                // Deliver the event later so the promise is completed first.
                invokeLater(new OneTimeTask() {
                    @Override
                    public void run() {
                        pipeline.fireChannelActive();
                    }
                });
            }
            safeSetSuccess(promise);
        }
        // Disconnects the transport; fires channelInactive (via the event loop) only if
        // the channel actually transitioned from active to inactive.
        @Override
        public final void disconnect(final ChannelPromise promise) {
            if (!promise.setUncancellable()) {
                return;
            }
            boolean wasActive = isActive();
            try {
                doDisconnect();
            } catch (Throwable t) {
                safeSetFailure(promise, t);
                closeIfClosed();
                return;
            }
            if (wasActive && !isActive()) {
                // Deliver the event later so the promise is completed first.
                invokeLater(new OneTimeTask() {
                    @Override
                    public void run() {
                        pipeline.fireChannelInactive();
                    }
                });
            }
            safeSetSuccess(promise);
            closeIfClosed(); // doDisconnect() might have closed the channel
        }
/**
 * Closes the channel: fails and releases all queued writes, completes the
 * close future and the given {@code promise}, and triggers
 * channelInactive/deregistration. Safe to call multiple times.
 */
@Override
public final void close(final ChannelPromise promise) {
    if (!promise.setUncancellable()) {
        return;
    }
    if (outboundBuffer == null) {
        // outboundBuffer is nulled below on the first close() call, so null
        // here means a close is already in progress.
        // Only needed if no VoidChannelPromise.
        if (!(promise instanceof VoidChannelPromise)) {
            // This means close() was called before so we just register a listener and return
            closeFuture.addListener(new ChannelFutureListener() {
                @Override
                public void operationComplete(ChannelFuture future) throws Exception {
                    promise.setSuccess();
                }
            });
        }
        return;
    }
    if (closeFuture.isDone()) {
        // Closed already.
        safeSetSuccess(promise);
        return;
    }
    final boolean wasActive = isActive();
    final ChannelOutboundBuffer buffer = outboundBuffer;
    outboundBuffer = null; // Disallow adding any messages and flushes to outboundBuffer.
    Executor closeExecutor = closeExecutor();
    if (closeExecutor != null) {
        // A transport-supplied executor performs the (potentially blocking) close.
        closeExecutor.execute(new OneTimeTask() {
            @Override
            public void run() {
                try {
                    // Execute the close.
                    doClose0(promise);
                } finally {
                    // Call invokeLater so closeAndDeregister is executed in the EventLoop again!
                    invokeLater(new OneTimeTask() {
                        @Override
                        public void run() {
                            // Fail all the queued messages
                            buffer.failFlushed(CLOSED_CHANNEL_EXCEPTION, false);
                            buffer.close(CLOSED_CHANNEL_EXCEPTION);
                            fireChannelInactiveAndDeregister(wasActive);
                        }
                    });
                }
            }
        });
    } else {
        try {
            // Close the channel and fail the queued messages in all cases.
            doClose0(promise);
        } finally {
            // Fail all the queued messages.
            buffer.failFlushed(CLOSED_CHANNEL_EXCEPTION, false);
            buffer.close(CLOSED_CHANNEL_EXCEPTION);
        }
        if (inFlush0) {
            // A flush is on the call stack; defer inactive/deregister to avoid
            // re-entrant handler invocation.
            invokeLater(new OneTimeTask() {
                @Override
                public void run() {
                    fireChannelInactiveAndDeregister(wasActive);
                }
            });
        } else {
            fireChannelInactiveAndDeregister(wasActive);
        }
    }
}
/**
 * Performs the actual transport close and completes both the close future and
 * the supplied promise, whether or not {@link #doClose()} succeeded.
 */
private void doClose0(ChannelPromise promise) {
    Throwable failure = null;
    try {
        doClose();
    } catch (Throwable t) {
        failure = t;
    }
    // The close future is marked closed on both the success and failure paths.
    closeFuture.setClosed();
    if (failure == null) {
        safeSetSuccess(promise);
    } else {
        safeSetFailure(promise, failure);
    }
}
/**
 * Fires {@code channelInactive} (later, on the event loop) when the channel
 * transitioned from active to inactive, then deregisters the channel.
 *
 * @param wasActive whether the channel was active before the close/disconnect
 */
private void fireChannelInactiveAndDeregister(final boolean wasActive) {
    final boolean becameInactive = wasActive && !isActive();
    if (becameInactive) {
        // Deferred via invokeLater(..) to avoid overlapping handler invocations.
        invokeLater(new OneTimeTask() {
            @Override
            public void run() {
                pipeline.fireChannelInactive();
            }
        });
    }
    deregister(voidPromise());
}
/**
 * Closes the channel immediately without completing any promise; failures are
 * only logged. Used e.g. to avoid FD leaks when registration fails.
 */
@Override
public final void closeForcibly() {
    try {
        doClose();
    } catch (Exception e) {
        logger.warn("Failed to close a channel.", e);
    }
}
/**
 * Deregisters the channel from its event loop and completes {@code promise}.
 * The promise is always completed successfully; a failure from
 * {@link #doDeregister()} is only logged.
 */
@Override
public final void deregister(final ChannelPromise promise) {
    if (!promise.setUncancellable()) {
        return;
    }
    if (!registered) {
        // Never registered, or already deregistered: nothing to do.
        safeSetSuccess(promise);
        return;
    }
    try {
        doDeregister();
    } catch (Throwable t) {
        logger.warn("Unexpected exception occurred while deregistering a channel.", t);
    } finally {
        if (registered) {
            registered = false;
            // Fired via invokeLater(..) so the outbound caller's stack unwinds first.
            invokeLater(new OneTimeTask() {
                @Override
                public void run() {
                    pipeline.fireChannelUnregistered();
                }
            });
        }
        // else: some transports (e.g. local and AIO) do not allow deregistering an
        // open channel; their doDeregister() calls close(), which calls deregister()
        // again - no need to fire channelUnregistered a second time.
        safeSetSuccess(promise);
    }
}
/**
 * Schedules a read via {@link #doBeginRead()}. A failure is surfaced on the
 * event loop as an exceptionCaught event and the channel is closed.
 */
@Override
public final void beginRead() {
    if (isActive()) {
        try {
            doBeginRead();
        } catch (final Exception e) {
            // Report the failure later so it does not run re-entrantly, then close.
            invokeLater(new OneTimeTask() {
                @Override
                public void run() {
                    pipeline.fireExceptionCaught(e);
                }
            });
            close(voidPromise());
        }
    }
}
/**
 * Filters and enqueues a message into the outbound buffer; it is actually
 * written by {@link #flush0()}. Fails the promise and releases the message if
 * the channel is closed or filtering/size-estimation throws.
 */
@Override
public final void write(Object msg, ChannelPromise promise) {
    ChannelOutboundBuffer buf = this.outboundBuffer;
    if (buf == null) {
        // A null outboundBuffer means the channel was closed, so fail the
        // promise right away; otherwise flush0() handles the rest.
        // See https://github.com/netty/netty/issues/2362
        safeSetFailure(promise, CLOSED_CHANNEL_EXCEPTION);
        // release message now to prevent resource-leak
        ReferenceCountUtil.release(msg);
        return;
    }
    int size;
    try {
        msg = filterOutboundMessage(msg);
        // Clamp a negative (unknown) size estimate to zero.
        size = Math.max(estimatorHandle().size(msg), 0);
    } catch (Throwable t) {
        safeSetFailure(promise, t);
        // releases the filtered message if filtering succeeded, else the original
        ReferenceCountUtil.release(msg);
        return;
    }
    buf.addMessage(msg, size, promise);
}
/**
 * Marks the pending writes in the outbound buffer as flushed and attempts to
 * write them out. A no-op if the channel was already closed.
 */
@Override
public final void flush() {
    final ChannelOutboundBuffer buf = this.outboundBuffer;
    if (buf != null) {
        buf.addFlush();
        flush0();
    }
}
/**
 * Writes the flushed portion of the outbound buffer to the transport via
 * {@link #doWrite}, guarding against re-entrant calls (inFlush0) and failing
 * pending writes when the channel is not active.
 */
protected void flush0() {
    if (inFlush0) {
        // Avoid re-entrance
        return;
    }
    final ChannelOutboundBuffer outboundBuffer = this.outboundBuffer;
    if (outboundBuffer == null || outboundBuffer.isEmpty()) {
        // Nothing to write, or the channel was closed (which nulls the buffer).
        return;
    }
    inFlush0 = true;
    // Mark all pending write requests as failure if the channel is inactive.
    if (!isActive()) {
        try {
            if (isOpen()) {
                // Open but not connected yet: writability notifications still apply.
                outboundBuffer.failFlushed(NOT_YET_CONNECTED_EXCEPTION, true);
            } else {
                // Do not trigger channelWritabilityChanged because the channel is closed already.
                outboundBuffer.failFlushed(CLOSED_CHANNEL_EXCEPTION, false);
            }
        } finally {
            inFlush0 = false;
        }
        return;
    }
    try {
        doWrite(outboundBuffer);
    } catch (Throwable t) {
        // An IOException with auto-close enabled closes the whole channel.
        boolean close = t instanceof IOException && config().isAutoClose();
        // We do not want to trigger channelWritabilityChanged event if the channel is going to be closed.
        outboundBuffer.failFlushed(t, !close);
        if (close) {
            close(voidPromise());
        }
    } finally {
        inFlush0 = false;
    }
}
/**
 * Returns the channel's shared void promise; results set on it are ignored
 * (see the VoidChannelPromise checks in safeSetSuccess/safeSetFailure).
 */
@Override
public final ChannelPromise voidPromise() {
    return unsafeVoidPromise;
}
/**
 * Ensures the channel is open; when it is not, fails the given promise with a
 * closed-channel exception.
 *
 * @return {@code true} if the channel is open
 */
protected final boolean ensureOpen(ChannelPromise promise) {
    if (!isOpen()) {
        safeSetFailure(promise, CLOSED_CHANNEL_EXCEPTION);
        return false;
    }
    return true;
}
/**
 * Marks the specified {@code promise} as success. If the {@code promise} is done already, log a message.
 * Void promises are ignored entirely.
 */
protected final void safeSetSuccess(ChannelPromise promise) {
    if (promise instanceof VoidChannelPromise) {
        return; // void promises carry no result
    }
    if (!promise.trySuccess()) {
        logger.warn("Failed to mark a promise as success because it is done already: {}", promise);
    }
}
/**
 * Marks the specified {@code promise} as failure. If the {@code promise} is done already, log a message.
 * Void promises are ignored entirely.
 */
protected final void safeSetFailure(ChannelPromise promise, Throwable cause) {
    if (promise instanceof VoidChannelPromise) {
        return; // void promises carry no result
    }
    if (!promise.tryFailure(cause)) {
        logger.warn("Failed to mark a promise as failure because it's done already: {}", promise, cause);
    }
}
/**
 * Runs the full close path (with a void promise) when the underlying channel
 * is no longer open; a no-op otherwise.
 */
protected final void closeIfClosed() {
    if (!isOpen()) {
        close(voidPromise());
    }
}
/**
 * Schedules {@code task} on the channel's {@link EventLoop} rather than
 * running it inline; rejection by the loop is logged, not propagated.
 */
private void invokeLater(Runnable task) {
    try {
        // This method is used by outbound operation implementations to trigger an inbound event later.
        // They do not trigger an inbound event immediately because an outbound operation might have been
        // triggered by another inbound event handler method. If fired immediately, the call stack
        // will look like this for example:
        //
        //   handlerA.inboundBufferUpdated() - (1) an inbound handler method closes a connection.
        //   -> handlerA.ctx.close()
        //      -> channel.unsafe.close()
        //         -> handlerA.channelInactive() - (2) another inbound handler method called while in (1) yet
        //
        // which means the execution of two inbound handler methods of the same handler overlap undesirably.
        eventLoop().execute(task);
    } catch (RejectedExecutionException e) {
        logger.warn("Can't invoke task later as EventLoop rejected it", e);
    }
}
/**
 * Appends the remote address to the message of the exceptions caused by connection attempt failure.
 * The original stack trace is preserved on the replacement exception; unknown
 * exception types are returned unchanged.
 */
protected final Throwable annotateConnectException(Throwable cause, SocketAddress remoteAddress) {
    final String message = cause.getMessage() + ": " + remoteAddress;
    final Throwable annotated;
    if (cause instanceof ConnectException) {
        annotated = new ConnectException(message);
    } else if (cause instanceof NoRouteToHostException) {
        annotated = new NoRouteToHostException(message);
    } else if (cause instanceof SocketException) {
        annotated = new SocketException(message);
    } else {
        return cause;
    }
    annotated.setStackTrace(cause.getStackTrace());
    return annotated;
}
/**
 * @return {@link Executor} to execute {@link #doClose()} or {@code null} if it should be done in the
 *         {@link EventLoop}. The default returns {@code null}; transports whose close may block
 *         can override this.
 */
protected Executor closeExecutor() {
    return null;
}
}
/**
 * Return {@code true} if the given {@link EventLoop} is compatible with this instance.
 */
protected abstract boolean isCompatible(EventLoop loop);

/**
 * Returns the {@link SocketAddress} which is bound locally.
 */
protected abstract SocketAddress localAddress0();

/**
 * Return the {@link SocketAddress} which the {@link Channel} is connected to.
 */
protected abstract SocketAddress remoteAddress0();

/**
 * Is called after the {@link Channel} is registered with its {@link EventLoop} as part of the register process.
 *
 * Sub-classes may override this method; the default is a no-op.
 */
protected void doRegister() throws Exception {
    // NOOP
}

/**
 * Bind the {@link Channel} to the {@link SocketAddress}.
 */
protected abstract void doBind(SocketAddress localAddress) throws Exception;

/**
 * Disconnect this {@link Channel} from its remote peer.
 */
protected abstract void doDisconnect() throws Exception;

/**
 * Close the {@link Channel}.
 */
protected abstract void doClose() throws Exception;

/**
 * Deregister the {@link Channel} from its {@link EventLoop}.
 *
 * Sub-classes may override this method; the default is a no-op.
 */
protected void doDeregister() throws Exception {
    // NOOP
}

/**
 * Schedule a read operation.
 */
protected abstract void doBeginRead() throws Exception;

/**
 * Flush the content of the given buffer to the remote peer.
 */
protected abstract void doWrite(ChannelOutboundBuffer in) throws Exception;

/**
 * Invoked when a new message is added to a {@link ChannelOutboundBuffer} of this {@link AbstractChannel}, so that
 * the {@link Channel} implementation converts the message to another. (e.g. heap buffer -> direct buffer)
 * The default implementation returns the message unchanged.
 */
protected Object filterOutboundMessage(Object msg) throws Exception {
    return msg;
}
/**
 * The promise backing the channel's close future. It may not be completed by
 * callers: every public success/failure setter throws
 * {@link IllegalStateException}. Only the channel itself completes it, via
 * the package-private {@link #setClosed()} (called e.g. from doClose0 and
 * the register failure path).
 */
static final class CloseFuture extends DefaultChannelPromise {

    CloseFuture(AbstractChannel ch) {
        super(ch);
    }

    @Override
    public ChannelPromise setSuccess() {
        throw new IllegalStateException();
    }

    @Override
    public ChannelPromise setFailure(Throwable cause) {
        throw new IllegalStateException();
    }

    @Override
    public boolean trySuccess() {
        throw new IllegalStateException();
    }

    @Override
    public boolean tryFailure(Throwable cause) {
        throw new IllegalStateException();
    }

    // Internal completion hook; bypasses the guards above.
    boolean setClosed() {
        return super.trySuccess();
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.resultSet.project;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.common.expression.PathSegment;
import org.apache.drill.common.expression.PathSegment.ArraySegment;
import org.apache.drill.common.expression.PathSegment.NameSegment;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.record.metadata.TupleNameSpace;
/**
 * Represents an explicit projection at some tuple level.
 * <p>
 * A column is projected if it is explicitly listed in the selection list.
 * <p>
 * If a column is a map, then the projection for the map's columns is based on
 * two rules:
 * <ol>
 * <li>If the projection list includes at least one explicit mention of a map
 * member, then include only those columns explicitly listed.</li>
 * <li>If the projection at the parent level lists only the map column itself
 * (which the projection can't know is a map), then assume this implies all
 * columns, as if the entry were "map.*".</li>
 * </ol>
 * <p>
 * Examples:<br>
 * <code>m</code><br>
 * If <code>m</code> turns out to be a map, project all members of
 * <code>m</code>.<br>
 * <code>m.a</code><br>
 * Column <code>m</code> must be a map. Project only column <code>a</code>.<br>
 * <code>m, m.a</code><br>
 * Tricky case. We interpret this as projecting only the "a" element of map m.
 * <p>
 * The projection set is built from a list of columns, represented as
 * {@link SchemaPath} objects, provided by the physical plan. The structure of
 * <tt>SchemaPath</tt> is a bit awkward:
 * <p>
 * <ul>
 * <li><tt>SchemaPath</tt> is a wrapper for a column which directly holds the
 * <tt>NameSegment</tt> for the top-level column.</li>
 * <li><tt>NameSegment</tt> holds a name. This can be a top name such as
 * `a`, or parts of a compound name such as `a`.`b`. Each <tt>NameSegment</tt>
 * has a "child" that points to the optional parts of the name that follow.</li>
 * <li><tt>PathSegment</tt> is the base class for the parts of a name.</li>
 * <li><tt>ArraySegment</tt> is the other kind of name part and represents
 * an array index such as the "[1]" in `columns`[1].</li>
 * </ul>
 * The parser here consumes only names; this mechanism does not consider
 * array indexes. As a result, there may be multiple projected columns that
 * map to the same projection here: `columns`[1] and `columns`[2] both map to
 * the name `columns`, for example.
 */
public class RequestedTupleImpl implements RequestedTuple {

  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RequestedTupleImpl.class);

  // Sentinel substituted by parse() for a null list: "project everything".
  private static final Collection<SchemaPath> PROJECT_ALL = Collections.singletonList(SchemaPath.STAR_COLUMN);

  // Enclosing column when this tuple describes a map's members; null at top level.
  private final RequestedColumnImpl parent;
  private final TupleNameSpace<RequestedColumn> projection = new TupleNameSpace<>();

  public RequestedTupleImpl() {
    parent = null;
  }

  public RequestedTupleImpl(RequestedColumnImpl parent) {
    this.parent = parent;
  }

  public RequestedTupleImpl(List<RequestedColumn> cols) {
    parent = null;
    for (RequestedColumn col : cols) {
      projection.add(col.name(), col);
    }
  }

  /**
   * Returns the projected column of the given name, or null if not projected.
   * Lookup lower-cases the name while entries are added under their original
   * case; assumes TupleNameSpace keys are case-insensitive -- TODO confirm.
   */
  @Override
  public RequestedColumn get(String colName) {
    return projection.get(colName.toLowerCase());
  }

  private RequestedColumnImpl getImpl(String colName) {
    return (RequestedColumnImpl) get(colName);
  }

  /** Returns the column's projection type, or UNPROJECTED when absent. */
  @Override
  public ProjectionType projectionType(String colName) {
    RequestedColumn col = get(colName);
    return col == null ? ProjectionType.UNPROJECTED : col.type();
  }

  /**
   * Returns the member projection for the named map column: the explicit
   * member projection when one exists; else "all members" when the map column
   * itself is projected; else "no members".
   */
  @Override
  public RequestedTuple mapProjection(String colName) {
    RequestedColumnImpl col = getImpl(colName);
    RequestedTuple mapProj = (col == null) ? null : col.mapProjection();
    if (mapProj != null) {
      return mapProj;
    }

    // No explicit information for the map. Members inherit the
    // same projection as the map itself.

    if (col != null) {
      return col.projectAllMembers(true);
    }
    return ImpliedTupleRequest.NO_MEMBERS;
  }

  /**
   * Create a requested tuple projection from a rewritten top-level
   * projection list. The columns within the list have already been parsed to
   * pick out arrays, maps and scalars. The list must not include the
   * wildcard: a wildcard list must be passed in as a null list. An
   * empty list means project nothing. Null list means project all, else
   * project only the columns in the list.
   *
   * @param projList top-level, parsed columns
   * @return the tuple projection for the top-level row
   */
  public static RequestedTuple build(List<RequestedColumn> projList) {
    if (projList == null) {
      return new ImpliedTupleRequest(true);
    }
    if (projList.isEmpty()) {
      return ImpliedTupleRequest.NO_MEMBERS;
    }
    return new RequestedTupleImpl(projList);
  }

  /**
   * Parse a projection list. The list should consist of a list of column names;
   * or wildcards. An empty list means
   * nothing is projected. A null list means everything is projected (that is, a
   * null list here is equivalent to a wildcard in the SELECT statement.)
   * <p>
   * The projection list may include both a wildcard and column names (as in
   * the case of implicit columns.) This results in a final list that both
   * says that everything is projected, and provides the list of columns.
   * <p>
   * Parsing is used at two different times. First, to parse the list from
   * the physical operator. This has the case above: an explicit wildcard
   * and/or additional columns. Then, this class is used again to prepare the
   * physical projection used when reading. In this case, wildcards should
   * be removed, implicit columns pulled out, and just the list of read-level
   * columns should remain.
   *
   * @param projList
   *          the list of projected columns, or null if no projection is to be
   *          done
   * @return a projection set that implements the specified projection
   */
  public static RequestedTuple parse(Collection<SchemaPath> projList) {
    if (projList == null) {
      projList = PROJECT_ALL;
    }
    else if (projList.isEmpty()) {
      return ImpliedTupleRequest.NO_MEMBERS;
    }
    RequestedTupleImpl projSet = new RequestedTupleImpl();
    for (SchemaPath col : projList) {
      projSet.parseSegment(col.getRootSegment());
    }
    return projSet;
  }

  /** Dispatches one path segment to the leaf, array, or internal (map) parser. */
  @Override
  public void parseSegment(PathSegment pathSeg) {
    if (pathSeg.isLastPath()) {
      parseLeaf((NameSegment) pathSeg);
    } else if (pathSeg.getChild().isArray()) {
      parseArray((NameSegment) pathSeg);
    } else {
      parseInternal((NameSegment) pathSeg);
    }
  }

  /** Handles a simple name at the end of a path: `a`, or the `b` of `a`.`b`. */
  private void parseLeaf(NameSegment nameSeg) {
    String name = nameSeg.getPath();
    RequestedColumnImpl member = getImpl(name);
    if (member == null) {
      projection.add(name, new RequestedColumnImpl(this, name));
      return;
    }
    if (member.isSimple() || member.isWildcard()) {
      throw UserException
        .validationError()
        .message("Duplicate column in project list: %s",
            member.fullName())
        .build(logger);
    }
    if (member.isArray()) {

      // Saw both a and a[x]. Occurs in project list.
      // Project all elements.

      member.projectAllElements();
      return;
    }

    // Else the column is a known map.

    assert member.isTuple();

    // Allow both a.b (existing) and a (this column)
    // Since we know a is a map, and we've projected the
    // whole map, modify the projection of the column to
    // project the entire map.

    member.projectAllMembers(true);
  }

  /** Handles an interior name whose child is another name: the `a` of `a`.`b`. */
  private void parseInternal(NameSegment nameSeg) {
    String name = nameSeg.getPath();
    RequestedColumnImpl member = getImpl(name);
    RequestedTuple map;
    if (member == null) {
      // New member. Since this is internal, this new member
      // must be a map.

      member = new RequestedColumnImpl(this, name);
      projection.add(name, member);
      map = member.asTuple();
    } else if (member.isTuple()) {

      // Known map. Add to it.

      map = member.asTuple();
    } else {

      // Member was previously projected by itself. We now
      // know it is a map. So, project entire map. (Earlier
      // we saw `a`. Now we see `a`.`b`.)

      map = member.projectAllMembers(true);
    }
    map.parseSegment(nameSeg.getChild());
  }

  /** Handles a name whose child is an array index: the `a` of `a`[1]. */
  private void parseArray(NameSegment nameSeg) {
    String name = nameSeg.getPath();
    ArraySegment arraySeg = ((ArraySegment) nameSeg.getChild());
    int index = arraySeg.getIndex();
    RequestedColumnImpl member = getImpl(name);
    if (member == null) {
      member = new RequestedColumnImpl(this, name);
      projection.add(name, member);
    } else if (member.isSimple()) {

      // Saw both a and a[x]. Occurs in project list.
      // Project all elements.

      member.projectAllElements();
      return;
    } else if (member.hasIndex(index)) {
      throw UserException
        .validationError()
        .message("Duplicate array index in project list: %s[%d]",
            member.fullName(), index)
        .build(logger);
    }
    member.addIndex(index);

    // Drills SQL parser does not support map arrays: a[0].c
    // But, the SchemaPath does support them, so no harm in
    // parsing them here.
    // NOTE(review): parseInternal() re-dispatches on nameSeg.getChild(), which
    // here is the ArraySegment, and parseSegment() casts segments to
    // NameSegment -- verify that map-array paths (a[0].c) cannot reach that
    // cast, or this may throw ClassCastException.

    if (! arraySeg.isLastPath()) {
      parseInternal(nameSeg);
    }
  }

  /** Returns the projected columns in the order they were added. */
  @Override
  public List<RequestedColumn> projections() {
    return projection.entries();
  }

  /** Appends this tuple's dotted-name prefix (from its parent chain) to buf. */
  @Override
  public void buildName(StringBuilder buf) {
    if (parent != null) {
      parent.buildName(buf);
    }
  }

  /**
   * Tuple projection type. This is a rough approximation. A scan-level projection
   * may include both a wildcard and implicit columns. This form is best used
   * in testing where such ambiguities do not apply.
   */
  @Override
  public TupleProjectionType type() {
    if (projection.isEmpty()) {
      return TupleProjectionType.NONE;
    }
    for (RequestedColumn col : projection) {
      if (col.isWildcard()) {
        return TupleProjectionType.ALL;
      }
    }
    return TupleProjectionType.SOME;
  }
}
| |
package io.cloudsoft.tosca.a4c.brooklyn;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.Collection;
import java.util.Map;
import java.util.function.BiFunction;
import org.apache.brooklyn.api.mgmt.classloading.BrooklynClassLoadingContext;
import org.apache.brooklyn.api.typereg.ManagedBundle;
import org.apache.brooklyn.api.typereg.OsgiBundleWithUrl;
import org.apache.brooklyn.core.mgmt.classloading.BrooklynClassLoadingContextSequential;
import org.apache.brooklyn.core.mgmt.classloading.OsgiBrooklynClassLoadingContext;
import org.apache.brooklyn.core.mgmt.ha.OsgiManager;
import org.apache.brooklyn.core.mgmt.internal.ManagementContextInternal;
import org.apache.brooklyn.core.typereg.UnsupportedTypePlanException;
import org.apache.brooklyn.util.core.ResourceUtils;
import org.apache.brooklyn.util.exceptions.Exceptions;
import org.apache.brooklyn.util.exceptions.UserFacingException;
import org.apache.brooklyn.util.stream.Streams;
import org.apache.brooklyn.util.text.Strings;
import org.apache.brooklyn.util.yaml.Yamls;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import alien4cloud.model.components.Csar;
import alien4cloud.tosca.ArchiveUploadService;
import alien4cloud.tosca.parser.ParsingErrorLevel;
import alien4cloud.tosca.parser.ParsingResult;
public class ToscaParser {

    private static final Logger log = LoggerFactory.getLogger(ToscaParser.class);

    private final Uploader uploader;

    /**
     * Classifies a submitted plan. After construction exactly one of these
     * outcomes holds (the invariant {@link #parse} relies on):
     * <ul>
     * <li>{@link #error} is set: the plan is not usable;</li>
     * <li>{@link #isTosca} is true: the plan is TOSCA YAML, with the text to
     *     upload available in {@link #toscaPlan};</li>
     * <li>{@link #csarLink} is set: the plan is a one-entry map pointing at a
     *     CSAR archive.</li>
     * </ul>
     */
    private static class PlanTypeChecker {

        Exception error = null;
        Object obj = null;
        boolean isTosca = false;
        String csarLink;
        // YAML text to upload when isTosca: the submitted plan itself, or the
        // document fetched when the plan was a one-entry tosca_link map.
        String toscaPlan;

        public PlanTypeChecker(String plan, BrooklynClassLoadingContext context) {
            try {
                obj = Yamls.parseAll(plan).iterator().next();
            } catch (Exception e) {
                Exceptions.propagateIfFatal(e);
                if (isToscaScore(plan)>0) {
                    error = new UserFacingException("Plan looks like it's meant to be TOSCA but it is not valid YAML", e);
                    log.debug("Invalid TOSCA YAML: "+error, error);
                } else {
                    error = new UserFacingException("Plan does not look like TOSCA and is not valid YAML");
                    log.trace("Not YAML", e);
                }
                return;
            }
            if (!(obj instanceof Map)) {
                // don't support just a URL pointing to CSAR (we used to) -- it needs to be a map with key csar_link
                error = new UserFacingException("Plan does not look like TOSCA: parses as YAML but not as a map");
                return;
            }
            if (isToscaScore((Map<?,?>)obj)>0) {
                isTosca = true;
                toscaPlan = plan;
                return;
            }
            if (((Map<?,?>)obj).size()==1) {
                csarLink = (String) ((Map<?,?>)obj).get("csar_link");
                if (csarLink!=null) {
                    return;
                }
                String toscaLink = (String) ((Map<?,?>)obj).get("tosca_link");
                if (toscaLink!=null) {
                    ResourceUtils resLoader = context!=null ? new ResourceUtils(context) : new ResourceUtils(this);
                    String resolved = resLoader.getResourceAsString(toscaLink);
                    obj = Yamls.parseAll(resolved).iterator().next();
                    isTosca = true;
                    toscaPlan = resolved;
                    // BUG FIX: previously this fell through and overwrote the
                    // result with an error, so tosca_link plans always failed
                    // in parse(..), violating the invariant documented above.
                    return;
                }
            }
            error = new UserFacingException("Plan does not look like TOSCA or csar_link: parses as YAML map but not one this TOSCA engine understands");
        }
    }

    /** Heuristic score (0..1) of how likely the given parsed YAML map is a TOSCA document. */
    public static double isToscaScore(Map<?,?> obj) {
        return isToscaScore(obj, (map,s) -> map.containsKey(s));
    }

    /** Heuristic score (0..1) of how likely the given plan text is a TOSCA document. */
    public static double isToscaScore(String obj) {
        return isToscaScore(obj, (plan,s) -> plan.contains(s));
    }

    /**
     * Shared scoring: probes the plan for well-known TOSCA keys using the
     * supplied containment check, returning the score of the strongest match.
     */
    public static <T> double isToscaScore(T obj, BiFunction<T,String,Boolean> contains) {
        if (contains.apply(obj, "tosca_definitions_version")) return 1;
        if (contains.apply(obj, "topology_template")) return 0.9;
        if (contains.apply(obj, "topology_name")) return 0.5;
        if (contains.apply(obj, "node_types")) return 0.5;
        log.trace("Not TOSCA - no recognized keys");
        return 0;
    }

    public ToscaParser(Uploader uploader) {
        this.uploader = uploader;
    }

    /**
     * Parses the given plan, uploading it (or the archive/document it links to)
     * via the {@link Uploader}.
     *
     * @param plan TOSCA YAML, or a one-entry YAML map with key {@code csar_link}
     *     or {@code tosca_link}
     * @param context class-loading context used to resolve links; may be null
     * @return the parsing result for the uploaded CSAR
     * @throws UnsupportedTypePlanException if the plan does not look like TOSCA
     * @throws UserFacingException if parsing reports errors
     */
    public ParsingResult<Csar> parse(String plan, BrooklynClassLoadingContext context) {
        ParsingResult<Csar> tp;
        PlanTypeChecker type = new PlanTypeChecker(plan, context);
        if (type.error!=null) {
            throw Exceptions.propagate(type.error);
        } else if (type.isTosca) {
            // toscaPlan is the submitted text, or the document a tosca_link pointed at
            tp = uploader.uploadSingleYaml(Streams.newInputStreamWithContents(type.toscaPlan), "submitted-tosca-plan");
        } else if (type.csarLink != null) {
            ResourceUtils resLoader = context!=null ? new ResourceUtils(context) : new ResourceUtils(this);
            InputStream resourceFromUrl;
            if (".".equals(type.csarLink)) {
                // special case: "." means the CSAR is the bundle containing the plan
                try {
                    resourceFromUrl = getContainingBundleInputStream(context);
                } catch (Exception e) {
                    throw Exceptions.propagateAnnotated("Could not load same-bundle csar_link relative to context "+context, e);
                }
            } else {
                try {
                    resourceFromUrl = resLoader.getResourceFromUrl(type.csarLink);
                } catch (Exception e) {
                    if (type.csarLink.startsWith("classpath:")) {
                        throw Exceptions.propagateAnnotated("Could not load csar_link "+type.csarLink+" relative to context "+context, e);
                    } else {
                        throw Exceptions.propagate(e);
                    }
                }
            }
            tp = uploader.uploadArchive(resourceFromUrl, "submitted-tosca-archive");
        } else {
            // one of the above cases should be true, shouldn't come here...
            throw new UnsupportedTypePlanException("Does not look like TOSCA");
        }
        if (ArchiveUploadService.hasError(tp, ParsingErrorLevel.ERROR)) {
            throw new UserFacingException("Could not parse TOSCA plan: "+"\n "
                + Strings.join(tp.getContext().getParsingErrors(), "\n "));
        }
        return tp;
    }

    /**
     * Returns an input stream for the bundle containing the plan, used when a
     * plan gives {@code csar_link: .}. The bundle is taken from the first
     * primary of the (sequential) class-loading context.
     */
    protected InputStream getContainingBundleInputStream(BrooklynClassLoadingContext context) {
        // assume the containing bundle is the first item in the context
        if (context==null) {
            throw new IllegalStateException("No class-loading context");
        }
        if (!(context instanceof BrooklynClassLoadingContextSequential)) {
            throw new IllegalStateException("Expected "+BrooklynClassLoadingContextSequential.class+" but had "+context.getClass());
        }
        BrooklynClassLoadingContextSequential seqCtx = (BrooklynClassLoadingContextSequential)context;
        if (seqCtx.getPrimaries().isEmpty()) {
            throw new IllegalStateException("No primaries set in context");
        }
        OsgiBrooklynClassLoadingContext osgiCtx = (OsgiBrooklynClassLoadingContext) seqCtx.getPrimaries().iterator().next();
        Collection<? extends OsgiBundleWithUrl> bundles = osgiCtx.getBundles();
        if (bundles.isEmpty()) {
            throw new IllegalStateException("No bundles in first primary loading context "+osgiCtx);
        }
        OsgiBundleWithUrl catalogBundle = bundles.iterator().next();
        String url = catalogBundle.getUrl();
        if (url!=null) {
            log.debug("Installing csar_link . from URL "+url);
            return ResourceUtils.create(context, this, "TOSCA csar_link: .").getResourceFromUrl(url);
        }
        // no URL recorded; fall back to the locally managed bundle file
        OsgiManager osgiMgr = ((ManagementContextInternal)osgiCtx.getManagementContext()).getOsgiManager().get();
        ManagedBundle mb = osgiMgr.getManagedBundle(catalogBundle.getVersionedName());
        File fn = osgiMgr.getBundleFile(mb);
        if (fn==null) {
            throw new IllegalStateException("No file available for first bundle "+catalogBundle+"/"+mb+" in first primary loading context "+osgiCtx);
        }
        try {
            log.debug("Installing csar_link . from file "+fn);
            return new FileInputStream(fn);
        } catch (FileNotFoundException e) {
            throw new IllegalStateException("Unable to find file for bundle "+mb+" ("+fn+")", e);
        }
    }
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.util;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.text.NumberFormat;
import java.util.Random;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.io.Writable;
/**
* Implements a <i>Bloom filter</i>, as defined by Bloom in 1970.
* <p>
* The Bloom filter is a data structure that was introduced in 1970 and that has
* been adopted by the networking research community in the past decade thanks
* to the bandwidth efficiencies that it offers for the transmission of set
* membership information between networked hosts. A sender encodes the
* information into a bit vector, the Bloom filter, that is more compact than a
* conventional representation. Computation and space costs for construction are
* linear in the number of elements. The receiver uses the filter to test
* whether various elements are members of the set. Though the filter will
* occasionally return a false positive, it will never return a false negative.
* When creating the filter, the sender can choose its desired point in a
* trade-off between the false positive rate and the size.
*
* <p>
* Originally inspired by <a href="http://www.one-lab.org">European Commission
* One-Lab Project 034819</a>.
*
* Bloom filters are very sensitive to the number of elements inserted into
* them. For HBase, the number of entries depends on the size of the data stored
* in the column. Currently the default region size is 256MB, so entry count ~=
* 256MB / (average value size for column). Despite this rule of thumb, there is
* no efficient way to calculate the entry count after compactions. Therefore,
* it is often easier to use a dynamic bloom filter that will add extra space
* instead of allowing the error rate to grow.
*
* ( http://www.eecs.harvard.edu/~michaelm/NEWWORK/postscripts/BloomFilterSurvey
* .pdf )
*
* m denotes the number of bits in the Bloom filter (bitSize) n denotes the
* number of elements inserted into the Bloom filter (maxKeys) k represents the
* number of hash functions used (nbHash) e represents the desired false
* positive rate for the bloom (err)
*
* If we fix the error rate (e) and know the number of entries, then the optimal
 * bloom size m = -(n * ln(err)) / (ln(2)^2) ~= n * ln(err) / ln(0.6185)
*
* The probability of false positives is minimized when k = m/n ln(2).
*
* @see BloomFilter The general behavior of a filter
*
* @see <a
* href="http://portal.acm.org/citation.cfm?id=362692&dl=ACM&coll=portal">
* Space/Time Trade-Offs in Hash Coding with Allowable Errors</a>
*/
@InterfaceAudience.Private
public class ByteBloomFilter implements BloomFilter, BloomFilterWriter {
/** Current file format version */
public static final int VERSION = 1;

/** Bytes (B) in the array. This actually has to fit into an int
 * (the meta format serializes it as an int). */
protected long byteSize;
/** Number of hash functions */
protected int hashCount;
/** Hash type */
protected final int hashType;
/** Hash Function */
protected final Hash hash;
/** Keys currently in the bloom */
protected int keyCount;
/** Max Keys expected for the bloom */
protected int maxKeys;
/** Bloom bits */
protected ByteBuffer bloom;

/** Record separator for the Bloom filter statistics human-readable string */
public static final String STATS_RECORD_SEP = "; ";

/**
 * Used in computing the optimal Bloom filter size. This approximately equals
 * 0.480453.
 */
public static final double LOG2_SQUARED = Math.log(2) * Math.log(2);

/**
 * A random number generator to use for "fake lookups" when testing to
 * estimate the ideal false positive rate. Test-only; null in production.
 */
private static Random randomGeneratorForTest;

/** Bit-value lookup array to prevent doing the same work over and over;
 * bitvals[i] is a byte with only bit i set. */
private static final byte [] bitvals = {
  (byte) 0x01,
  (byte) 0x02,
  (byte) 0x04,
  (byte) 0x08,
  (byte) 0x10,
  (byte) 0x20,
  (byte) 0x40,
  (byte) 0x80
};
/**
* Loads bloom filter meta data from file input.
* @param meta stored bloom meta data
* @throws IllegalArgumentException meta data is invalid
* @throws IOException error reading from the input
*/
public ByteBloomFilter(DataInput meta)
throws IOException, IllegalArgumentException {
// byteSize is persisted as an int (see MetaWriter.write) even though the
// field is a long.
this.byteSize = meta.readInt();
this.hashCount = meta.readInt();
this.hashType = meta.readInt();
this.keyCount = meta.readInt();
// The original max-key target is not persisted; use the actual key count.
this.maxKeys = this.keyCount;
this.hash = Hash.getInstance(this.hashType);
if (hash == null) {
throw new IllegalArgumentException("Invalid hash type: " + hashType);
}
sanityCheck();
}
/**
 * Computes the optimal Bloom bit count for the given key count and target
 * false positive rate: m = -n * ln(err) / (ln 2)^2, assuming the number of
 * hash functions is ideal and need not be an integer.
 *
 * @param maxKeys   number of keys the filter must hold
 * @param errorRate desired false positive rate
 * @return the number of bits, rounded up
 */
public static long computeBitSize(long maxKeys, double errorRate) {
  // Keep the multiplication order (keys * bits-per-key) so results are
  // bit-identical to the classic single-expression form.
  double bitsPerKey = -Math.log(errorRate) / LOG2_SQUARED;
  return (long) Math.ceil(maxKeys * bitsPerKey);
}
/**
 * The maximum number of keys a Bloom filter of {@code bitSize} bits can
 * hold while keeping the given error rate, assuming an ideal (possibly
 * fractional) number of hash functions — hence "ideal" in the name.
 *
 * @param bitSize   number of bits in the filter
 * @param errorRate target false positive rate
 * @return maximum number of keys that can be inserted into the Bloom filter
 * @see #computeMaxKeys(long, double, int) for a more precise estimate
 */
public static long idealMaxKeys(long bitSize, double errorRate) {
  // Truncation toward zero is deliberate: rounding up could admit more
  // keys than the target error rate allows.
  double keysPerBit = LOG2_SQUARED / -Math.log(errorRate);
  return (long) (bitSize * keysPerBit);
}
/**
 * The maximum number of keys a Bloom filter of {@code bitSize} bits can
 * hold at the given error rate when the hash-function count is fixed
 * (rather than ideal, unlike {@link #idealMaxKeys(long, double)}).
 *
 * @param bitSize   number of bits in the filter
 * @param errorRate target false positive rate
 * @param hashCount number of hash functions in use
 * @return the maximum number of keys that can be inserted while keeping
 *         the target error rate
 */
public static long computeMaxKeys(long bitSize, double errorRate,
    int hashCount) {
  // Evaluate the two factors separately but in the same floating-point
  // order as the original single expression.
  double negBitsPerFunction = -bitSize * 1.0 / hashCount;
  double logTerm = Math.log(1 - Math.exp(Math.log(errorRate) / hashCount));
  return (long) (negBitsPerFunction * logTerm);
}
/**
* Computes the error rate for this Bloom filter, taking into account the
* actual number of hash functions and keys inserted. The return value of
* this function changes as a Bloom filter is being populated. Used for
* reporting the actual error rate of compound Bloom filters when writing
* them out.
*
* @return error rate for this particular Bloom filter
*/
public double actualErrorRate() {
// Delegate to the static formula using the live key count and bit size.
return actualErrorRate(keyCount, byteSize * 8, hashCount);
}
/**
 * Computes the actual error rate for the given number of elements, number
 * of bits, and number of hash functions. Taken directly from the
 * <a href=
 * "http://en.wikipedia.org/wiki/Bloom_filter#Probability_of_false_positives"
 * > Wikipedia Bloom filter article</a>.
 *
 * @param maxKeys       number of keys inserted
 * @param bitSize       number of bits in the filter
 * @param functionCount number of hash functions
 * @return the actual error rate: (1 - e^(-kn/m))^k
 */
public static double actualErrorRate(long maxKeys, long bitSize,
    int functionCount) {
  // Fraction of bits expected to be set after inserting maxKeys keys.
  double fractionSet =
      1 - Math.exp(-functionCount * maxKeys * 1.0 / bitSize);
  // Raise to the k-th power via exp/log, as in the original expression.
  return Math.exp(Math.log(fractionSet) * functionCount);
}
/**
 * Increases the given bit size of a Bloom filter, expressed in bytes, until
 * the byte count is a multiple of 2^foldFactor so the filter can later be
 * folded that many times.
 *
 * @param bitSize    number of bits the filter must hold
 * @param foldFactor number of lossless halvings that must be possible
 * @return foldable byte size
 * @throws IllegalArgumentException if the aligned size exceeds an int
 */
public static int computeFoldableByteSize(long bitSize, int foldFactor) {
  long bytes = (bitSize + 7) / 8;           // round bits up to whole bytes
  long alignment = 1L << foldFactor;        // folding needs 2^foldFactor pieces
  long remainder = bytes & (alignment - 1);
  if (remainder != 0) {
    // Round up to the next multiple of the alignment.
    bytes = bytes - remainder + alignment;
  }
  if (bytes > Integer.MAX_VALUE) {
    throw new IllegalArgumentException("byteSize=" + bytes + " too "
        + "large for bitSize=" + bitSize + ", foldFactor=" + foldFactor);
  }
  return (int) bytes;
}
/**
* Optimal hash-function count k = ln(2) * (m / n) for m bits and n keys.
* NOTE(review): bitSize / maxKeys is integer division, so the bits-per-key
* ratio is truncated before multiplying by ln(2). Kept as-is because the
* resulting hashCount is persisted with written filters; changing the
* rounding would alter filter parameters — confirm before "fixing".
*/
private static int optimalFunctionCount(int maxKeys, long bitSize) {
long i = bitSize / maxKeys;
double result = Math.ceil(Math.log(2) * i);
if (result > Integer.MAX_VALUE){
throw new IllegalArgumentException("result too large for integer value.");
}
return (int)result;
}
/** Private constructor used by other constructors: resolves the hash
* function only; callers must fill in size/count fields themselves. */
private ByteBloomFilter(int hashType) {
this.hashType = hashType;
this.hash = Hash.getInstance(hashType);
}
/**
* Determines &amp; initializes bloom filter meta data from user config. Call
* {@link #allocBloom()} to allocate bloom filter data.
*
* @param maxKeys Maximum expected number of keys that will be stored in this
* bloom
* @param errorRate Desired false positive error rate. Lower rate = more
* storage required
* @param hashType Type of hash function to use
* @param foldFactor When finished adding entries, you may be able to 'fold'
* this bloom to save space. Tradeoff potentially excess bytes in
* bloom for ability to fold if keyCount is exponentially greater
* than maxKeys.
* @throws IllegalArgumentException if the resulting configuration is invalid
*/
public ByteBloomFilter(int maxKeys, double errorRate, int hashType,
int foldFactor) throws IllegalArgumentException {
this(hashType);
// Size the bit array from the key count and target error rate, then pick
// the matching hash-function count.
long bitSize = computeBitSize(maxKeys, errorRate);
hashCount = optimalFunctionCount(maxKeys, bitSize);
this.maxKeys = maxKeys;
// increase byteSize so folding is possible
byteSize = computeFoldableByteSize(bitSize, foldFactor);
sanityCheck();
}
/**
* Creates a Bloom filter of the given size.
*
* @param byteSizeHint the desired number of bytes for the Bloom filter bit
* array. Will be increased so that folding is possible.
* @param errorRate target false positive rate of the Bloom filter
* @param hashType Bloom filter hash function type
* @param foldFactor number of lossless halvings that must remain possible
* @return the new Bloom filter of the desired size
*/
public static ByteBloomFilter createBySize(int byteSizeHint,
double errorRate, int hashType, int foldFactor) {
ByteBloomFilter bbf = new ByteBloomFilter(hashType);
// 8L keeps the bit count in long arithmetic for large hints.
bbf.byteSize = computeFoldableByteSize(byteSizeHint * 8L, foldFactor);
long bitSize = bbf.byteSize * 8;
bbf.maxKeys = (int) idealMaxKeys(bitSize, errorRate);
bbf.hashCount = optimalFunctionCount(bbf.maxKeys, bitSize);
// Adjust max keys to bring error rate closer to what was requested,
// because byteSize was adjusted to allow for folding, and hashCount was
// rounded.
bbf.maxKeys = (int) computeMaxKeys(bitSize, errorRate, bbf.hashCount);
return bbf;
}
/**
 * Creates another similar Bloom filter: same byte size, hash-function
 * count, hash type and max-key target. The bit array is not copied and the
 * new filter's key count starts at zero.
 *
 * @return an empty Bloom filter with the same configuration as this
 */
public ByteBloomFilter createAnother() {
  ByteBloomFilter sibling = new ByteBloomFilter(hashType);
  sibling.byteSize = byteSize;
  sibling.hashCount = hashCount;
  sibling.maxKeys = maxKeys;
  return sibling;
}
@Override
public void allocBloom() {
// Allocation is one-shot; a second call would silently discard bits.
if (this.bloom != null) {
throw new IllegalArgumentException("can only create bloom once.");
}
this.bloom = ByteBuffer.allocate((int)this.byteSize);
// Heap buffers always expose a backing array; set()/contains() rely on it.
assert this.bloom.hasArray();
}
/**
* Validates the filter configuration: byteSize must be positive and fit an
* int, hashCount positive, the hash type resolvable, and keyCount
* non-negative (zero is allowed despite the message text).
*
* @throws IllegalArgumentException if any parameter is out of range
*/
void sanityCheck() throws IllegalArgumentException {
if(0 >= this.byteSize || this.byteSize > Integer.MAX_VALUE) {
throw new IllegalArgumentException("Invalid byteSize: " + this.byteSize);
}
if(this.hashCount <= 0) {
throw new IllegalArgumentException("Hash function count must be > 0");
}
if (this.hash == null) {
throw new IllegalArgumentException("hashType must be known");
}
if (this.keyCount < 0) {
throw new IllegalArgumentException("must have positive keyCount");
}
}
/**
* Verifies that the supplied buffer holds exactly this filter's bytes.
*
* @param bloom candidate Bloom bit buffer
* @throws IllegalArgumentException if the buffer length differs from
* the configured byteSize
*/
void bloomCheck(ByteBuffer bloom) throws IllegalArgumentException {
if (this.byteSize != bloom.limit()) {
throw new IllegalArgumentException(
"Configured bloom length should match actual length");
}
}
/** Adds the whole byte array as a key; convenience overload of
* {@link #add(byte[], int, int)}. */
public void add(byte [] buf) {
add(buf, 0, buf.length);
}
/**
* Adds the key in {@code buf[offset, offset+len)} to the filter by setting
* hashCount bits and incrementing the key count.
*/
@Override
public void add(byte [] buf, int offset, int len) {
/*
* For faster hashing, use combinatorial generation
* http://www.eecs.harvard.edu/~kirsch/pubs/bbbf/esa06.pdf
*/
int hash1 = this.hash.hash(buf, offset, len, 0);
int hash2 = this.hash.hash(buf, offset, len, hash1);
for (int i = 0; i < this.hashCount; i++) {
// byteSize is a long, so the modulus is computed in 64-bit arithmetic
// and cannot overflow even for very large blooms.
long hashLoc = Math.abs((hash1 + i * hash2) % (this.byteSize * 8));
set(hashLoc);
}
++this.keyCount;
}
/** Should only be used in tests: checks the whole array against this
* filter's own bit buffer. */
boolean contains(byte [] buf) {
return contains(buf, 0, buf.length, this.bloom);
}
/** Should only be used in tests: checks a slice against this filter's own
* bit buffer. */
boolean contains(byte [] buf, int offset, int length) {
return contains(buf, offset, length, bloom);
}
/** Should only be used in tests: checks the whole array against the given
* bit buffer. */
boolean contains(byte[] buf, ByteBuffer bloom) {
return contains(buf, 0, buf.length, bloom);
}
/**
* Membership test against the given buffer (or this filter's own buffer
* when {@code theBloom} is null).
*/
@Override
public boolean contains(byte[] buf, int offset, int length,
ByteBuffer theBloom) {
if (theBloom == null) {
// In a version 1 HFile Bloom filter data is stored in a separate meta
// block which is loaded on demand, but in version 2 it is pre-loaded.
// We want to use the same API in both cases.
theBloom = bloom;
}
if (theBloom.limit() != byteSize) {
throw new IllegalArgumentException("Bloom does not match expected size:"
+ " theBloom.limit()=" + theBloom.limit() + ", byteSize=" + byteSize);
}
// Delegate to the static implementation over the buffer's backing array.
return contains(buf, offset, length, theBloom.array(),
theBloom.arrayOffset(), (int) byteSize, hash, hashCount);
}
/**
 * Membership test over a raw Bloom bit array. Uses the same combinatorial
 * hashing scheme (hash1 + i * hash2) as {@link #add(byte[], int, int)}.
 *
 * @param buf data to look up
 * @param offset offset of the key within {@code buf}
 * @param length length of the key
 * @param bloomArray array holding the Bloom bits
 * @param bloomOffset offset of the Bloom bits within {@code bloomArray}
 * @param bloomSize size of the Bloom filter in bytes
 * @param hash hash function to use
 * @param hashCount number of hash functions to apply
 * @return true if the key may be present, false if it definitely is not
 */
public static boolean contains(byte[] buf, int offset, int length,
    byte[] bloomArray, int bloomOffset, int bloomSize, Hash hash,
    int hashCount) {
  int hash1 = hash.hash(buf, offset, length, 0);
  int hash2 = hash.hash(buf, offset, length, hash1);
  // Compute the bit count in long arithmetic: the previous "bloomSize << 3"
  // overflowed int for blooms of 256MB and larger, corrupting the modulus
  // below. add() already uses long math (byteSize * 8); this keeps the two
  // paths consistent. Results are unchanged for all non-overflowing sizes.
  long bloomBitSize = bloomSize * 8L;
  if (randomGeneratorForTest == null) {
    // Production mode.
    int compositeHash = hash1;
    for (int i = 0; i < hashCount; i++) {
      long hashLoc = Math.abs(compositeHash % bloomBitSize);
      compositeHash += hash2;
      // Inline bit probe (equivalent to get()) so positions beyond
      // Integer.MAX_VALUE are handled; the byte index still fits an int
      // because bloomSize does.
      int bytePos = (int) (hashLoc >> 3);
      int bitPos = (int) (hashLoc & 0x7);
      if ((bloomArray[bloomOffset + bytePos] & bitvals[bitPos]) == 0) {
        return false;
      }
    }
  } else {
    // Test mode with "fake lookups" to estimate "ideal false positive rate".
    // nextInt() requires an int bound; clamp for (unrealistically) huge
    // blooms — test mode only.
    int boundedBitSize = (int) Math.min(bloomBitSize, Integer.MAX_VALUE);
    for (int i = 0; i < hashCount; i++) {
      int hashLoc = randomGeneratorForTest.nextInt(boundedBitSize);
      if (!get(hashLoc, bloomArray, bloomOffset)) {
        return false;
      }
    }
  }
  return true;
}
//---------------------------------------------------------------------------
/** Private helpers */
/**
* Set the bit at the specified index to 1.
*
* @param pos index of bit; must be non-negative (callers pass Math.abs of
* a modulus)
*/
void set(long pos) {
int bytePos = (int)(pos / 8);
int bitPos = (int)(pos % 8);
byte curByte = bloom.get(bytePos);
// OR in the single-bit mask for this position.
curByte |= bitvals[bitPos];
bloom.put(bytePos, curByte);
}
/**
 * Check if bit at specified index is 1.
 *
 * @param pos index of bit
 * @param bloomArray array holding the Bloom bits
 * @param bloomOffset offset of the Bloom bits inside the array
 * @return true if bit at specified index is 1, false if 0.
 */
static boolean get(int pos, byte[] bloomArray, int bloomOffset) {
  int bytePos = pos >> 3;  // pos / 8
  int bitPos = pos & 0x7;  // pos % 8
  // Mask out the single bit; any nonzero result means it is set.
  return (bloomArray[bloomOffset + bytePos] & (1 << bitPos)) != 0;
}
/** @return number of keys added so far */
@Override
public long getKeyCount() {
return keyCount;
}
/** @return the configured maximum key count */
@Override
public long getMaxKeys() {
return maxKeys;
}
/** @return size of the bit array in bytes */
@Override
public long getByteSize() {
return byteSize;
}
/** @return the hash type code this filter was built with */
public int getHashType() {
return hashType;
}
/**
* Folds the bit array in half repeatedly while that is lossless (byte size
* even and max-key budget at least twice the actual key count), OR-ing the
* upper halves into the lower half and shrinking byteSize/maxKeys
* accordingly. No-op for empty filters or non-array-backed buffers.
*/
@Override
public void compactBloom() {
// see if the actual size is exponentially smaller than expected.
if (this.keyCount > 0 && this.bloom.hasArray()) {
int pieces = 1;
int newByteSize = (int)this.byteSize;
int newMaxKeys = this.maxKeys;
// while exponentially smaller & folding is lossless
while ( (newByteSize & 1) == 0 && newMaxKeys > (this.keyCount<<1) ) {
pieces <<= 1;
newByteSize >>= 1;
newMaxKeys >>= 1;
}
// if we should fold these into pieces
if (pieces > 1) {
byte[] array = this.bloom.array();
int start = this.bloom.arrayOffset();
int end = start + newByteSize;
int off = end;
// OR every later piece into the first newByteSize bytes.
for(int p = 1; p < pieces; ++p) {
for(int pos = start; pos < end; ++pos) {
array[pos] |= array[off++];
}
}
// folding done, only use a subset of this array
this.bloom.rewind();
this.bloom.limit(newByteSize);
this.bloom = this.bloom.slice();
this.byteSize = newByteSize;
this.maxKeys = newMaxKeys;
}
}
}
//---------------------------------------------------------------------------
/**
* Writes just the bloom filter to the output array
* @param out OutputStream to place bloom
* @throws IOException Error writing bloom array, or if the buffer is not
* array-backed
*/
public void writeBloom(final DataOutput out) throws IOException {
if (!this.bloom.hasArray()) {
throw new IOException("Only writes ByteBuffer with underlying array.");
}
out.write(bloom.array(), bloom.arrayOffset(), bloom.limit());
}
/** @return a Writable that serializes this filter's meta data */
@Override
public Writable getMetaWriter() {
return new MetaWriter();
}
/** @return a Writable that serializes this filter's bit array */
@Override
public Writable getDataWriter() {
return new DataWriter();
}
/**
* Write-only Writable for the filter meta data; field order matches the
* {@link #ByteBloomFilter(DataInput)} constructor. byteSize is narrowed to
* an int on write (sanityCheck guarantees it fits).
*/
private class MetaWriter implements Writable {
protected MetaWriter() {}
@Override
public void readFields(DataInput arg0) throws IOException {
throw new IOException("Cant read with this class.");
}
@Override
public void write(DataOutput out) throws IOException {
out.writeInt(VERSION);
out.writeInt((int) byteSize);
out.writeInt(hashCount);
out.writeInt(hashType);
out.writeInt(keyCount);
}
}
/** Write-only Writable that emits the raw bit array via
* {@link #writeBloom(DataOutput)}. */
private class DataWriter implements Writable {
protected DataWriter() {}
@Override
public void readFields(DataInput arg0) throws IOException {
throw new IOException("Cant read with this class.");
}
@Override
public void write(DataOutput out) throws IOException {
writeBloom(out);
}
}
/** @return number of hash functions in use */
public int getHashCount() {
return hashCount;
}
/** @return true once the bit buffer has been allocated/loaded in memory */
@Override
public boolean supportsAutoLoading() {
return bloom != null;
}
/**
 * Toggles the deterministic "fake lookup" mode used by tests to estimate
 * the ideal false positive rate; a fixed seed keeps runs reproducible.
 *
 * @param enabled true to probe random positions, false for real lookups
 */
public static void setFakeLookupMode(boolean enabled) {
  randomGeneratorForTest = enabled ? new Random(283742987L) : null;
}
/**
 * {@inheritDoc}
 * Just concatenate row and column by default. May return the original row
 * buffer if the column qualifier is empty.
 */
@Override
public byte[] createBloomKey(byte[] rowBuf, int rowOffset, int rowLen,
    byte[] qualBuf, int qualOffset, int qualLen) {
  // Fast path: the key is exactly the row buffer, so skip the copy.
  boolean rowIsWholeBuffer = rowOffset == 0 && rowLen == rowBuf.length;
  if (qualLen <= 0 && rowIsWholeBuffer) {
    return rowBuf;
  }
  byte[] key = new byte[rowLen + qualLen];
  System.arraycopy(rowBuf, rowOffset, key, 0, rowLen);
  if (qualLen > 0) {
    System.arraycopy(qualBuf, qualOffset, key, rowLen, qualLen);
  }
  return key;
}
/** @return raw byte-wise comparator used for bloom keys */
@Override
public KVComparator getComparator() {
// return Bytes.BYTES_RAWCOMPARATOR;
return KeyValue.RAW_COMPARATOR;
}
/**
 * A human-readable string with statistics for the given Bloom filter.
 *
 * @param bloomFilter the Bloom filter to output statistics for;
 * @return a string consisting of "&lt;key&gt;: &lt;value&gt;" parts
 *         separated by {@link #STATS_RECORD_SEP}.
 */
public static String formatStats(BloomFilterBase bloomFilter) {
  long keyCount = bloomFilter.getKeyCount();
  long maxKeyCount = bloomFilter.getMaxKeys();
  StringBuilder out = new StringBuilder();
  out.append("BloomSize: ").append(bloomFilter.getByteSize())
      .append(STATS_RECORD_SEP);
  out.append("No of Keys in bloom: ").append(keyCount)
      .append(STATS_RECORD_SEP);
  out.append("Max Keys for bloom: ").append(maxKeyCount);
  if (maxKeyCount > 0) {
    // NumberFormat renders the fill ratio as a locale-formatted percentage.
    out.append(STATS_RECORD_SEP).append("Percentage filled: ").append(
        NumberFormat.getPercentInstance().format(keyCount * 1.0 / maxKeyCount));
  }
  return out.toString();
}
/** Statistics plus the live error-rate estimate.
* NOTE(review): String.format("%.8f") is default-locale dependent —
* confirm whether a locale-stable rendering is required here. */
@Override
public String toString() {
return formatStats(this) + STATS_RECORD_SEP + "Actual error rate: "
+ String.format("%.8f", actualErrorRate());
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2/user_event.proto
package com.google.cloud.retail.v2;
/**
*
*
* <pre>
* A transaction represents the entire purchase transaction.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.PurchaseTransaction}
*/
public final class PurchaseTransaction extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2.PurchaseTransaction)
PurchaseTransactionOrBuilder {
// NOTE(review): protoc-generated code ("DO NOT EDIT" above). Comments below
// are review notes only; change user_event.proto and regenerate instead of
// editing this file by hand.
private static final long serialVersionUID = 0L;
// Use PurchaseTransaction.newBuilder() to construct.
private PurchaseTransaction(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private PurchaseTransaction() {
id_ = "";
currencyCode_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new PurchaseTransaction();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Each case value is
// (field_number << 3) | wire_type: 10 = id (field 1, length-delimited),
// 21/29/37 = revenue/tax/cost (fields 2-4, fixed32 floats),
// 42 = currency_code (field 5, length-delimited).
private PurchaseTransaction(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
id_ = s;
break;
}
case 21:
{
revenue_ = input.readFloat();
break;
}
case 29:
{
tax_ = input.readFloat();
break;
}
case 37:
{
cost_ = input.readFloat();
break;
}
case 42:
{
java.lang.String s = input.readStringRequireUtf8();
currencyCode_ = s;
break;
}
default:
{
// Unrecognized tags are preserved in unknownFields for round-tripping.
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2.UserEventProto
.internal_static_google_cloud_retail_v2_PurchaseTransaction_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2.UserEventProto
.internal_static_google_cloud_retail_v2_PurchaseTransaction_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2.PurchaseTransaction.class,
com.google.cloud.retail.v2.PurchaseTransaction.Builder.class);
}
public static final int ID_FIELD_NUMBER = 1;
// Holds either the decoded String or the original ByteString; getId()
// lazily replaces the ByteString with its decoded String (standard
// generated-code caching pattern, visible in the getters below).
private volatile java.lang.Object id_;
/**
*
*
* <pre>
* The transaction ID with a length limit of 128 characters.
* </pre>
*
* <code>string id = 1;</code>
*
* @return The id.
*/
@java.lang.Override
public java.lang.String getId() {
java.lang.Object ref = id_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
id_ = s;
return s;
}
}
/**
*
*
* <pre>
* The transaction ID with a length limit of 128 characters.
* </pre>
*
* <code>string id = 1;</code>
*
* @return The bytes for id.
*/
@java.lang.Override
public com.google.protobuf.ByteString getIdBytes() {
java.lang.Object ref = id_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
id_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REVENUE_FIELD_NUMBER = 2;
private float revenue_;
/**
*
*
* <pre>
* Required. Total non-zero revenue or grand total associated with the
* transaction. This value include shipping, tax, or other adjustments to
* total revenue that you want to include as part of your revenue
* calculations.
* </pre>
*
* <code>float revenue = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The revenue.
*/
@java.lang.Override
public float getRevenue() {
return revenue_;
}
public static final int TAX_FIELD_NUMBER = 3;
private float tax_;
/**
*
*
* <pre>
* All the taxes associated with the transaction.
* </pre>
*
* <code>float tax = 3;</code>
*
* @return The tax.
*/
@java.lang.Override
public float getTax() {
return tax_;
}
public static final int COST_FIELD_NUMBER = 4;
private float cost_;
/**
*
*
* <pre>
* All the costs associated with the products. These can be manufacturing
* costs, shipping expenses not borne by the end user, or any other costs,
* such that:
* * Profit = [revenue][google.cloud.retail.v2.PurchaseTransaction.revenue] -
* [tax][google.cloud.retail.v2.PurchaseTransaction.tax] -
* [cost][google.cloud.retail.v2.PurchaseTransaction.cost]
* </pre>
*
* <code>float cost = 4;</code>
*
* @return The cost.
*/
@java.lang.Override
public float getCost() {
return cost_;
}
public static final int CURRENCY_CODE_FIELD_NUMBER = 5;
// Same lazy String/ByteString caching as id_.
private volatile java.lang.Object currencyCode_;
/**
*
*
* <pre>
* Required. Currency code. Use three-character ISO-4217 code.
* </pre>
*
* <code>string currency_code = 5 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The currencyCode.
*/
@java.lang.Override
public java.lang.String getCurrencyCode() {
java.lang.Object ref = currencyCode_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
currencyCode_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Currency code. Use three-character ISO-4217 code.
* </pre>
*
* <code>string currency_code = 5 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for currencyCode.
*/
@java.lang.Override
public com.google.protobuf.ByteString getCurrencyCodeBytes() {
java.lang.Object ref = currencyCode_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
currencyCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Proto3 semantics: default-valued fields (empty string, 0F) are skipped
// on the wire in both writeTo and getSerializedSize below.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, id_);
}
if (revenue_ != 0F) {
output.writeFloat(2, revenue_);
}
if (tax_ != 0F) {
output.writeFloat(3, tax_);
}
if (cost_ != 0F) {
output.writeFloat(4, cost_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(currencyCode_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, currencyCode_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, id_);
}
if (revenue_ != 0F) {
size += com.google.protobuf.CodedOutputStream.computeFloatSize(2, revenue_);
}
if (tax_ != 0F) {
size += com.google.protobuf.CodedOutputStream.computeFloatSize(3, tax_);
}
if (cost_ != 0F) {
size += com.google.protobuf.CodedOutputStream.computeFloatSize(4, cost_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(currencyCode_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, currencyCode_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Floats are compared via their raw bit patterns, so NaN == NaN and
// 0.0F != -0.0F here — consistent with the generated hashCode below.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.retail.v2.PurchaseTransaction)) {
return super.equals(obj);
}
com.google.cloud.retail.v2.PurchaseTransaction other =
(com.google.cloud.retail.v2.PurchaseTransaction) obj;
if (!getId().equals(other.getId())) return false;
if (java.lang.Float.floatToIntBits(getRevenue())
!= java.lang.Float.floatToIntBits(other.getRevenue())) return false;
if (java.lang.Float.floatToIntBits(getTax()) != java.lang.Float.floatToIntBits(other.getTax()))
return false;
if (java.lang.Float.floatToIntBits(getCost())
!= java.lang.Float.floatToIntBits(other.getCost())) return false;
if (!getCurrencyCode().equals(other.getCurrencyCode())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + ID_FIELD_NUMBER;
hash = (53 * hash) + getId().hashCode();
hash = (37 * hash) + REVENUE_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(getRevenue());
hash = (37 * hash) + TAX_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(getTax());
hash = (37 * hash) + COST_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(getCost());
hash = (37 * hash) + CURRENCY_CODE_FIELD_NUMBER;
hash = (53 * hash) + getCurrencyCode().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom overloads delegating to PARSER; one pair
// (with and without an ExtensionRegistryLite) per input type.
public static com.google.cloud.retail.v2.PurchaseTransaction parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.PurchaseTransaction parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.PurchaseTransaction parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.PurchaseTransaction parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.PurchaseTransaction parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.PurchaseTransaction parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.PurchaseTransaction parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.PurchaseTransaction parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2.PurchaseTransaction parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.PurchaseTransaction parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2.PurchaseTransaction parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.PurchaseTransaction parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.retail.v2.PurchaseTransaction prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A transaction represents the entire purchase transaction.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.PurchaseTransaction}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.retail.v2.PurchaseTransaction)
com.google.cloud.retail.v2.PurchaseTransactionOrBuilder {
// NOTE(review): generated Builder internals — regenerate from the .proto
// rather than editing.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2.UserEventProto
.internal_static_google_cloud_retail_v2_PurchaseTransaction_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2.UserEventProto
.internal_static_google_cloud_retail_v2_PurchaseTransaction_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2.PurchaseTransaction.class,
com.google.cloud.retail.v2.PurchaseTransaction.Builder.class);
}
// Construct using com.google.cloud.retail.v2.PurchaseTransaction.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
// Resets every field to its proto3 default.
@java.lang.Override
public Builder clear() {
super.clear();
id_ = "";
revenue_ = 0F;
tax_ = 0F;
cost_ = 0F;
currencyCode_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.retail.v2.UserEventProto
.internal_static_google_cloud_retail_v2_PurchaseTransaction_descriptor;
}
@java.lang.Override
public com.google.cloud.retail.v2.PurchaseTransaction getDefaultInstanceForType() {
return com.google.cloud.retail.v2.PurchaseTransaction.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.retail.v2.PurchaseTransaction build() {
com.google.cloud.retail.v2.PurchaseTransaction result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies the builder state into a new immutable message instance.
@java.lang.Override
public com.google.cloud.retail.v2.PurchaseTransaction buildPartial() {
com.google.cloud.retail.v2.PurchaseTransaction result =
new com.google.cloud.retail.v2.PurchaseTransaction(this);
result.id_ = id_;
result.revenue_ = revenue_;
result.tax_ = tax_;
result.cost_ = cost_;
result.currencyCode_ = currencyCode_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.retail.v2.PurchaseTransaction) {
return mergeFrom((com.google.cloud.retail.v2.PurchaseTransaction) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Proto3 merge semantics: only non-default fields from `other` overwrite.
public Builder mergeFrom(com.google.cloud.retail.v2.PurchaseTransaction other) {
if (other == com.google.cloud.retail.v2.PurchaseTransaction.getDefaultInstance()) return this;
if (!other.getId().isEmpty()) {
id_ = other.id_;
onChanged();
}
if (other.getRevenue() != 0F) {
setRevenue(other.getRevenue());
}
if (other.getTax() != 0F) {
setTax(other.getTax());
}
if (other.getCost() != 0F) {
setCost(other.getCost());
}
if (!other.getCurrencyCode().isEmpty()) {
currencyCode_ = other.currencyCode_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.retail.v2.PurchaseTransaction parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.cloud.retail.v2.PurchaseTransaction) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Field: string id = 1. Stored as Object so the value can be lazily converted
// between ByteString (wire form) and String (decoded form) and cached.
private java.lang.Object id_ = "";
/**
 *
 *
 * <pre>
 * The transaction ID with a length limit of 128 characters.
 * </pre>
 *
 * <code>string id = 1;</code>
 *
 * @return The id.
 */
public java.lang.String getId() {
    java.lang.Object ref = id_;
    if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so subsequent reads skip the UTF-8 decode.
        id_ = s;
        return s;
    } else {
        return (java.lang.String) ref;
    }
}
/**
 *
 *
 * <pre>
 * The transaction ID with a length limit of 128 characters.
 * </pre>
 *
 * <code>string id = 1;</code>
 *
 * @return The bytes for id.
 */
public com.google.protobuf.ByteString getIdBytes() {
    java.lang.Object ref = id_;
    if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString form.
        id_ = b;
        return b;
    } else {
        return (com.google.protobuf.ByteString) ref;
    }
}
/**
 *
 *
 * <pre>
 * The transaction ID with a length limit of 128 characters.
 * </pre>
 *
 * <code>string id = 1;</code>
 *
 * @param value The id to set.
 * @return This builder for chaining.
 */
public Builder setId(java.lang.String value) {
    if (value == null) {
        throw new NullPointerException();
    }
    id_ = value;
    onChanged();
    return this;
}
/**
 *
 *
 * <pre>
 * The transaction ID with a length limit of 128 characters.
 * </pre>
 *
 * <code>string id = 1;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearId() {
    id_ = getDefaultInstance().getId();
    onChanged();
    return this;
}
/**
 *
 *
 * <pre>
 * The transaction ID with a length limit of 128 characters.
 * </pre>
 *
 * <code>string id = 1;</code>
 *
 * @param value The bytes for id to set.
 * @return This builder for chaining.
 */
public Builder setIdBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
        throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    id_ = value;
    onChanged();
    return this;
}
// Field: float revenue = 2 (REQUIRED by API annotation, not by proto3 wire format).
private float revenue_;
/**
 *
 *
 * <pre>
 * Required. Total non-zero revenue or grand total associated with the
 * transaction. This value include shipping, tax, or other adjustments to
 * total revenue that you want to include as part of your revenue
 * calculations.
 * </pre>
 *
 * <code>float revenue = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The revenue.
 */
@java.lang.Override
public float getRevenue() {
    return revenue_;
}
/**
 *
 *
 * <pre>
 * Required. Total non-zero revenue or grand total associated with the
 * transaction. This value include shipping, tax, or other adjustments to
 * total revenue that you want to include as part of your revenue
 * calculations.
 * </pre>
 *
 * <code>float revenue = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The revenue to set.
 * @return This builder for chaining.
 */
public Builder setRevenue(float value) {
    revenue_ = value;
    onChanged();
    return this;
}
/**
 *
 *
 * <pre>
 * Required. Total non-zero revenue or grand total associated with the
 * transaction. This value include shipping, tax, or other adjustments to
 * total revenue that you want to include as part of your revenue
 * calculations.
 * </pre>
 *
 * <code>float revenue = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearRevenue() {
    revenue_ = 0F;
    onChanged();
    return this;
}
// Field: float tax = 3.
private float tax_;
/**
 *
 *
 * <pre>
 * All the taxes associated with the transaction.
 * </pre>
 *
 * <code>float tax = 3;</code>
 *
 * @return The tax.
 */
@java.lang.Override
public float getTax() {
    return tax_;
}
/**
 *
 *
 * <pre>
 * All the taxes associated with the transaction.
 * </pre>
 *
 * <code>float tax = 3;</code>
 *
 * @param value The tax to set.
 * @return This builder for chaining.
 */
public Builder setTax(float value) {
    tax_ = value;
    onChanged();
    return this;
}
/**
 *
 *
 * <pre>
 * All the taxes associated with the transaction.
 * </pre>
 *
 * <code>float tax = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearTax() {
    tax_ = 0F;
    onChanged();
    return this;
}
// Field: float cost = 4.
private float cost_;
/**
 *
 *
 * <pre>
 * All the costs associated with the products. These can be manufacturing
 * costs, shipping expenses not borne by the end user, or any other costs,
 * such that:
 * * Profit = [revenue][google.cloud.retail.v2.PurchaseTransaction.revenue] -
 * [tax][google.cloud.retail.v2.PurchaseTransaction.tax] -
 * [cost][google.cloud.retail.v2.PurchaseTransaction.cost]
 * </pre>
 *
 * <code>float cost = 4;</code>
 *
 * @return The cost.
 */
@java.lang.Override
public float getCost() {
    return cost_;
}
/**
 *
 *
 * <pre>
 * All the costs associated with the products. These can be manufacturing
 * costs, shipping expenses not borne by the end user, or any other costs,
 * such that:
 * * Profit = [revenue][google.cloud.retail.v2.PurchaseTransaction.revenue] -
 * [tax][google.cloud.retail.v2.PurchaseTransaction.tax] -
 * [cost][google.cloud.retail.v2.PurchaseTransaction.cost]
 * </pre>
 *
 * <code>float cost = 4;</code>
 *
 * @param value The cost to set.
 * @return This builder for chaining.
 */
public Builder setCost(float value) {
    cost_ = value;
    onChanged();
    return this;
}
/**
 *
 *
 * <pre>
 * All the costs associated with the products. These can be manufacturing
 * costs, shipping expenses not borne by the end user, or any other costs,
 * such that:
 * * Profit = [revenue][google.cloud.retail.v2.PurchaseTransaction.revenue] -
 * [tax][google.cloud.retail.v2.PurchaseTransaction.tax] -
 * [cost][google.cloud.retail.v2.PurchaseTransaction.cost]
 * </pre>
 *
 * <code>float cost = 4;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearCost() {
    cost_ = 0F;
    onChanged();
    return this;
}
// Field: string currency_code = 5. Same lazy String/ByteString caching as id_.
private java.lang.Object currencyCode_ = "";
/**
 *
 *
 * <pre>
 * Required. Currency code. Use three-character ISO-4217 code.
 * </pre>
 *
 * <code>string currency_code = 5 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The currencyCode.
 */
public java.lang.String getCurrencyCode() {
    java.lang.Object ref = currencyCode_;
    if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        currencyCode_ = s;
        return s;
    } else {
        return (java.lang.String) ref;
    }
}
/**
 *
 *
 * <pre>
 * Required. Currency code. Use three-character ISO-4217 code.
 * </pre>
 *
 * <code>string currency_code = 5 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for currencyCode.
 */
public com.google.protobuf.ByteString getCurrencyCodeBytes() {
    java.lang.Object ref = currencyCode_;
    if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        currencyCode_ = b;
        return b;
    } else {
        return (com.google.protobuf.ByteString) ref;
    }
}
/**
 *
 *
 * <pre>
 * Required. Currency code. Use three-character ISO-4217 code.
 * </pre>
 *
 * <code>string currency_code = 5 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The currencyCode to set.
 * @return This builder for chaining.
 */
public Builder setCurrencyCode(java.lang.String value) {
    if (value == null) {
        throw new NullPointerException();
    }
    currencyCode_ = value;
    onChanged();
    return this;
}
/**
 *
 *
 * <pre>
 * Required. Currency code. Use three-character ISO-4217 code.
 * </pre>
 *
 * <code>string currency_code = 5 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearCurrencyCode() {
    currencyCode_ = getDefaultInstance().getCurrencyCode();
    onChanged();
    return this;
}
/**
 *
 *
 * <pre>
 * Required. Currency code. Use three-character ISO-4217 code.
 * </pre>
 *
 * <code>string currency_code = 5 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The bytes for currencyCode to set.
 * @return This builder for chaining.
 */
public Builder setCurrencyCodeBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
        throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    currencyCode_ = value;
    onChanged();
    return this;
}
// Delegations that narrow the return type to this Builder for call chaining.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.retail.v2.PurchaseTransaction)
}
// @@protoc_insertion_point(class_scope:google.cloud.retail.v2.PurchaseTransaction)
// Singleton default instance; created eagerly at class load.
private static final com.google.cloud.retail.v2.PurchaseTransaction DEFAULT_INSTANCE;
static {
    DEFAULT_INSTANCE = new com.google.cloud.retail.v2.PurchaseTransaction();
}
public static com.google.cloud.retail.v2.PurchaseTransaction getDefaultInstance() {
    return DEFAULT_INSTANCE;
}
// Stateless wire-format parser shared by all callers.
private static final com.google.protobuf.Parser<PurchaseTransaction> PARSER =
    new com.google.protobuf.AbstractParser<PurchaseTransaction>() {
        @java.lang.Override
        public PurchaseTransaction parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
            return new PurchaseTransaction(input, extensionRegistry);
        }
    };
public static com.google.protobuf.Parser<PurchaseTransaction> parser() {
    return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<PurchaseTransaction> getParserForType() {
    return PARSER;
}
@java.lang.Override
public com.google.cloud.retail.v2.PurchaseTransaction getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lucene.queryparser.classic;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.util.Version;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.support.QueryParsers;
import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded;
/**
 * A query parser that uses the {@link MapperService} in order to build smarter
 * queries based on the mapping information.
 *
 * <p>Also breaks fields of the form [type].[name] into a boolean query that must include
 * the type as well as the query on the name.
 */
public class MapperQueryParser extends QueryParser {
// Per-field query extensions (e.g. "_exists_", "_missing_") consulted before
// normal field-query construction.
public static final ImmutableMap<String, FieldQueryExtension> fieldQueryExtensions;
static {
    fieldQueryExtensions = ImmutableMap.<String, FieldQueryExtension>builder()
        .put(ExistsFieldQueryExtension.NAME, new ExistsFieldQueryExtension())
        .put(MissingFieldQueryExtension.NAME, new MissingFieldQueryExtension())
        .build();
}
private final QueryParseContext parseContext;
// Mutable per-query state: this parser instance is reused and reconfigured via
// reset(QueryParserSettings) before each parse.
private QueryParserSettings settings;
// Analyzer used for quoted (phrase) text.
private Analyzer quoteAnalyzer;
// True when settings force an analyzer, overriding the per-field mapper analyzer.
private boolean forcedAnalyzer;
private boolean forcedQuoteAnalyzer;
// Field type of the field currently being parsed; scratch state shared by the
// get*QuerySingle methods.
private MappedFieldType currentFieldType;
private boolean analyzeWildcard;
// Optional suffix appended to the field name when resolving the quoted-text field.
private String quoteFieldSuffix;
public MapperQueryParser(QueryParseContext parseContext) {
    // Field and analyzer are supplied later through reset(...).
    super(null, null);
    this.parseContext = parseContext;
}
/**
 * Re-initializes this (reused) parser from the given per-query settings:
 * default field, analyzers, and all classic QueryParser knobs.
 *
 * @param settings the parser configuration for the upcoming parse
 */
public void reset(QueryParserSettings settings) {
    this.settings = settings;
    this.field = settings.defaultField();
    if (settings.fields() != null) {
        if (settings.fields().size() == 1) {
            // just mark it as the default field
            this.field = settings.fields().get(0);
        } else {
            // otherwise, we need to have the default field being null...
            this.field = null;
        }
    }
    this.forcedAnalyzer = settings.forcedAnalyzer() != null;
    this.setAnalyzer(forcedAnalyzer ? settings.forcedAnalyzer() : settings.defaultAnalyzer());
    if (settings.forcedQuoteAnalyzer() != null) {
        this.forcedQuoteAnalyzer = true;
        this.quoteAnalyzer = settings.forcedQuoteAnalyzer();
    } else if (forcedAnalyzer) {
        // A forced analyzer with no explicit quote analyzer also forces quoting.
        this.forcedQuoteAnalyzer = true;
        this.quoteAnalyzer = settings.forcedAnalyzer();
    } else {
        // BUGFIX: this branch previously cleared forcedAnalyzer instead of
        // forcedQuoteAnalyzer, clobbering the flag computed above and leaving
        // forcedQuoteAnalyzer stale from a previous reset().
        this.forcedQuoteAnalyzer = false;
        this.quoteAnalyzer = settings.defaultQuoteAnalyzer();
    }
    this.quoteFieldSuffix = settings.quoteFieldSuffix();
    setMultiTermRewriteMethod(settings.rewriteMethod());
    setEnablePositionIncrements(settings.enablePositionIncrements());
    setAutoGeneratePhraseQueries(settings.autoGeneratePhraseQueries());
    setMaxDeterminizedStates(settings.maxDeterminizedStates());
    setAllowLeadingWildcard(settings.allowLeadingWildcard());
    setLowercaseExpandedTerms(settings.lowercaseExpandedTerms());
    setPhraseSlop(settings.phraseSlop());
    setDefaultOperator(settings.defaultOperator());
    setFuzzyMinSim(settings.getFuzziness().asFloat());
    setFuzzyPrefixLength(settings.fuzzyPrefixLength());
    setLocale(settings.locale());
    this.analyzeWildcard = settings.analyzeWildcard();
}
/**
 * We override this one so we can get the fuzzy part to be treated as string, so people can do: "age:10~5" or "timestamp:2012-10-10~5d"
 */
@Override
Query handleBareFuzzy(String qfield, Token fuzzySlop, String termImage) throws ParseException {
    // A bare "~" (image length 1) means "use the default similarity"; anything
    // after the "~" is forwarded verbatim so values like "5d" keep working.
    String similarity = fuzzySlop.image.length() == 1
            ? Float.toString(fuzzyMinSim)
            : fuzzySlop.image.substring(1);
    return getFuzzyQuery(qfield, termImage, similarity);
}
/**
 * Lets the current field's mapper build a specialized term query when one is
 * available; otherwise defers to the classic parser's term query.
 */
@Override
protected Query newTermQuery(Term term) {
    if (currentFieldType == null) {
        return super.newTermQuery(term);
    }
    Query mapperQuery = currentFieldType.queryStringTermQuery(term);
    return mapperQuery != null ? mapperQuery : super.newTermQuery(term);
}
// Use Elasticsearch's shared match-all query instead of Lucene's default.
@Override
protected Query newMatchAllDocsQuery() {
    return Queries.newMatchAllQuery();
}
/**
 * Builds a field query: first consults the registered field-query extensions
 * (e.g. exists/missing), then expands multi-field names into either a dis-max
 * query or a boolean SHOULD query, depending on settings.
 *
 * @return the query, or {@code null} when nothing matched (e.g. all stopwords)
 */
@Override
public Query getFieldQuery(String field, String queryText, boolean quoted) throws ParseException {
    FieldQueryExtension fieldQueryExtension = fieldQueryExtensions.get(field);
    if (fieldQueryExtension != null) {
        return fieldQueryExtension.query(parseContext, queryText);
    }
    Collection<String> fields = extractMultiFields(field);
    if (fields != null) {
        if (fields.size() == 1) {
            return getFieldQuerySingle(fields.iterator().next(), queryText, quoted);
        }
        if (settings.useDisMax()) {
            DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
            boolean added = false;
            for (String mField : fields) {
                Query q = getFieldQuerySingle(mField, queryText, quoted);
                if (q != null) {
                    added = true;
                    applyBoost(mField, q);
                    disMaxQuery.add(q);
                }
            }
            if (!added) {
                return null;
            }
            return disMaxQuery;
        } else {
            List<BooleanClause> clauses = new ArrayList<>();
            for (String mField : fields) {
                Query q = getFieldQuerySingle(mField, queryText, quoted);
                if (q != null) {
                    applyBoost(mField, q);
                    clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
                }
            }
            if (clauses.size() == 0) // happens for stopwords
                return null;
            return getBooleanQuery(clauses, true);
        }
    } else {
        return getFieldQuerySingle(field, queryText, quoted);
    }
}
/**
 * Builds a query for one concrete field. Unquoted text starting with
 * {@code >}, {@code >=}, {@code <}, {@code <=} is rewritten to a one-sided
 * range query. Otherwise the field's mapper (if any) drives analysis and may
 * build the term query itself; on failure, lenient mode returns {@code null}.
 */
private Query getFieldQuerySingle(String field, String queryText, boolean quoted) throws ParseException {
    if (!quoted && queryText.length() > 1) {
        if (queryText.charAt(0) == '>') {
            if (queryText.length() > 2) {
                if (queryText.charAt(1) == '=') {
                    return getRangeQuerySingle(field, queryText.substring(2), null, true, true);
                }
            }
            return getRangeQuerySingle(field, queryText.substring(1), null, false, true);
        } else if (queryText.charAt(0) == '<') {
            if (queryText.length() > 2) {
                if (queryText.charAt(1) == '=') {
                    return getRangeQuerySingle(field, null, queryText.substring(2), true, true);
                }
            }
            return getRangeQuerySingle(field, null, queryText.substring(1), true, false);
        }
    }
    currentFieldType = null;
    // Save the analyzer so the field-specific one set below never leaks out.
    Analyzer oldAnalyzer = getAnalyzer();
    try {
        if (quoted) {
            setAnalyzer(quoteAnalyzer);
            if (quoteFieldSuffix != null) {
                // Quoted text may target a dedicated ".suffix" sub-field.
                currentFieldType = parseContext.fieldMapper(field + quoteFieldSuffix);
            }
        }
        if (currentFieldType == null) {
            currentFieldType = parseContext.fieldMapper(field);
        }
        if (currentFieldType != null) {
            if (quoted) {
                if (!forcedQuoteAnalyzer) {
                    setAnalyzer(parseContext.getSearchQuoteAnalyzer(currentFieldType));
                }
            } else {
                if (!forcedAnalyzer) {
                    setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
                }
            }
            // NOTE(review): this inner null check is redundant (guarded by the
            // enclosing if) but kept byte-identical here.
            if (currentFieldType != null) {
                Query query = null;
                if (currentFieldType.useTermQueryWithQueryString()) {
                    try {
                        query = currentFieldType.termQuery(queryText, parseContext);
                    } catch (RuntimeException e) {
                        if (settings.lenient()) {
                            // Lenient mode: swallow mapper failures and drop the clause.
                            return null;
                        } else {
                            throw e;
                        }
                    }
                }
                if (query == null) {
                    query = super.getFieldQuery(currentFieldType.names().indexName(), queryText, quoted);
                }
                return query;
            }
        }
        return super.getFieldQuery(field, queryText, quoted);
    } finally {
        setAnalyzer(oldAnalyzer);
    }
}
/**
 * Phrase-query variant with slop: expands multi-field names into a dis-max or
 * boolean SHOULD query, applying the slop to every per-field sub-query.
 *
 * @return the query, or {@code null} when nothing matched (e.g. all stopwords)
 */
@Override
protected Query getFieldQuery(String field, String queryText, int slop) throws ParseException {
    Collection<String> fields = extractMultiFields(field);
    if (fields != null) {
        if (settings.useDisMax()) {
            DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
            boolean added = false;
            for (String mField : fields) {
                Query q = super.getFieldQuery(mField, queryText, slop);
                if (q != null) {
                    added = true;
                    applyBoost(mField, q);
                    q = applySlop(q, slop);
                    disMaxQuery.add(q);
                }
            }
            if (!added) {
                return null;
            }
            return disMaxQuery;
        } else {
            List<BooleanClause> clauses = new ArrayList<>();
            for (String mField : fields) {
                Query q = super.getFieldQuery(mField, queryText, slop);
                if (q != null) {
                    applyBoost(mField, q);
                    q = applySlop(q, slop);
                    clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
                }
            }
            if (clauses.size() == 0) // happens for stopwords
                return null;
            return getBooleanQuery(clauses, true);
        }
    } else {
        return super.getFieldQuery(field, queryText, slop);
    }
}
/**
 * Builds a range query; a "*" endpoint is treated as open-ended. Multi-field
 * names expand into a dis-max or boolean SHOULD query.
 */
@Override
protected Query getRangeQuery(String field, String part1, String part2, boolean startInclusive, boolean endInclusive) throws ParseException {
    if ("*".equals(part1)) {
        part1 = null;
    }
    if ("*".equals(part2)) {
        part2 = null;
    }
    Collection<String> fields = extractMultiFields(field);
    if (fields == null) {
        return getRangeQuerySingle(field, part1, part2, startInclusive, endInclusive);
    }
    if (fields.size() == 1) {
        return getRangeQuerySingle(fields.iterator().next(), part1, part2, startInclusive, endInclusive);
    }
    if (settings.useDisMax()) {
        DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
        boolean added = false;
        for (String mField : fields) {
            Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive);
            if (q != null) {
                added = true;
                applyBoost(mField, q);
                disMaxQuery.add(q);
            }
        }
        if (!added) {
            return null;
        }
        return disMaxQuery;
    } else {
        List<BooleanClause> clauses = new ArrayList<>();
        for (String mField : fields) {
            Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive);
            if (q != null) {
                applyBoost(mField, q);
                clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
            }
        }
        if (clauses.size() == 0) // happens for stopwords
            return null;
        return getBooleanQuery(clauses, true);
    }
}
/**
 * Builds a range query for one concrete field, delegating to the field's
 * mapper when present (with time-zone support for date fields). Lenient mode
 * converts mapper failures into a {@code null} (dropped) clause.
 */
private Query getRangeQuerySingle(String field, String part1, String part2, boolean startInclusive, boolean endInclusive) {
    currentFieldType = parseContext.fieldMapper(field);
    if (currentFieldType != null) {
        // Only lowercase non-numeric endpoints; numbers must stay untouched.
        if (lowercaseExpandedTerms && !currentFieldType.isNumeric()) {
            part1 = part1 == null ? null : part1.toLowerCase(locale);
            part2 = part2 == null ? null : part2.toLowerCase(locale);
        }
        try {
            Query rangeQuery;
            if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) {
                DateFieldMapper.DateFieldType dateFieldType = (DateFieldMapper.DateFieldType) this.currentFieldType;
                rangeQuery = dateFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null);
            } else {
                rangeQuery = currentFieldType.rangeQuery(part1, part2, startInclusive, endInclusive);
            }
            return rangeQuery;
        } catch (RuntimeException e) {
            if (settings.lenient()) {
                return null;
            }
            throw e;
        }
    }
    // Unmapped field: fall back to the classic parser's range query.
    return newRangeQuery(field, part1, part2, startInclusive, endInclusive);
}
/**
 * Builds a fuzzy query, expanding multi-field names into a dis-max or boolean
 * SHOULD query.
 *
 * @param minSimilarity similarity/fuzziness kept as a string so mappers can
 *        interpret values like "5d" for dates
 * @return the query, or {@code null} when every sub-query was dropped
 */
protected Query getFuzzyQuery(String field, String termStr, String minSimilarity) throws ParseException {
    if (lowercaseExpandedTerms) {
        termStr = termStr.toLowerCase(locale);
    }
    Collection<String> fields = extractMultiFields(field);
    if (fields != null) {
        if (fields.size() == 1) {
            return getFuzzyQuerySingle(fields.iterator().next(), termStr, minSimilarity);
        }
        if (settings.useDisMax()) {
            DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
            boolean added = false;
            for (String mField : fields) {
                Query q = getFuzzyQuerySingle(mField, termStr, minSimilarity);
                if (q != null) {
                    added = true;
                    applyBoost(mField, q);
                    disMaxQuery.add(q);
                }
            }
            if (!added) {
                return null;
            }
            return disMaxQuery;
        } else {
            List<BooleanClause> clauses = new ArrayList<>();
            for (String mField : fields) {
                Query q = getFuzzyQuerySingle(mField, termStr, minSimilarity);
                // BUGFIX: getFuzzyQuerySingle returns null in lenient mode; the
                // missing null guard (present in every sibling method) caused an
                // NPE in applyBoost/BooleanClause.
                if (q != null) {
                    applyBoost(mField, q);
                    clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
                }
            }
            if (clauses.size() == 0) // all sub-queries dropped
                return null;
            return getBooleanQuery(clauses, true);
        }
    } else {
        return getFuzzyQuerySingle(field, termStr, minSimilarity);
    }
}
/**
 * Builds a fuzzy query for one concrete field via its mapper when present
 * (so the similarity string can be interpreted, e.g. date math); falls back to
 * the classic parser's float-based fuzzy query for unmapped fields.
 */
private Query getFuzzyQuerySingle(String field, String termStr, String minSimilarity) throws ParseException {
    currentFieldType = parseContext.fieldMapper(field);
    if (currentFieldType != null) {
        try {
            return currentFieldType.fuzzyQuery(termStr, Fuzziness.build(minSimilarity), fuzzyPrefixLength, settings.fuzzyMaxExpansions(), FuzzyQuery.defaultTranspositions);
        } catch (RuntimeException e) {
            if (settings.lenient()) {
                // Lenient mode: drop this clause instead of failing the parse.
                return null;
            }
            throw e;
        }
    }
    return super.getFuzzyQuery(field, termStr, Float.parseFloat(minSimilarity));
}
/**
 * Creates the concrete FuzzyQuery, converting the float similarity into an
 * edit distance and applying the configured fuzzy rewrite method.
 */
@Override
protected Query newFuzzyQuery(Term term, float minimumSimilarity, int prefixLength) {
    String text = term.text();
    // Code-point count (not char count) so surrogate pairs are measured correctly.
    int numEdits = FuzzyQuery.floatToEdits(minimumSimilarity, text.codePointCount(0, text.length()));
    FuzzyQuery query = new FuzzyQuery(term, numEdits, prefixLength, settings.fuzzyMaxExpansions(), FuzzyQuery.defaultTranspositions);
    QueryParsers.setRewriteMethod(query, settings.fuzzyRewriteMethod());
    return query;
}
/**
 * Builds a prefix query, expanding multi-field names into a dis-max or
 * boolean SHOULD query.
 *
 * @return the query, or {@code null} when nothing matched (e.g. all stopwords)
 */
@Override
protected Query getPrefixQuery(String field, String termStr) throws ParseException {
    if (lowercaseExpandedTerms) {
        termStr = termStr.toLowerCase(locale);
    }
    Collection<String> fields = extractMultiFields(field);
    if (fields != null) {
        if (fields.size() == 1) {
            return getPrefixQuerySingle(fields.iterator().next(), termStr);
        }
        if (settings.useDisMax()) {
            DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
            boolean added = false;
            for (String mField : fields) {
                Query q = getPrefixQuerySingle(mField, termStr);
                if (q != null) {
                    added = true;
                    applyBoost(mField, q);
                    disMaxQuery.add(q);
                }
            }
            if (!added) {
                return null;
            }
            return disMaxQuery;
        } else {
            List<BooleanClause> clauses = new ArrayList<>();
            for (String mField : fields) {
                Query q = getPrefixQuerySingle(mField, termStr);
                if (q != null) {
                    applyBoost(mField, q);
                    clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
                }
            }
            if (clauses.size() == 0) // happens for stopwords
                return null;
            return getBooleanQuery(clauses, true);
        }
    } else {
        return getPrefixQuerySingle(field, termStr);
    }
}
/**
 * Builds a prefix query for one concrete field, letting the mapper construct
 * it when possible; lenient mode drops the clause on mapper failures. The
 * analyzer is restored afterwards so field-specific analyzers never leak.
 */
private Query getPrefixQuerySingle(String field, String termStr) throws ParseException {
    currentFieldType = null;
    Analyzer oldAnalyzer = getAnalyzer();
    try {
        currentFieldType = parseContext.fieldMapper(field);
        if (currentFieldType != null) {
            if (!forcedAnalyzer) {
                setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
            }
            Query query = null;
            if (currentFieldType.useTermQueryWithQueryString()) {
                query = currentFieldType.prefixQuery(termStr, multiTermRewriteMethod, parseContext);
            }
            if (query == null) {
                query = getPossiblyAnalyzedPrefixQuery(currentFieldType.names().indexName(), termStr);
            }
            return query;
        }
        return getPossiblyAnalyzedPrefixQuery(field, termStr);
    } catch (RuntimeException e) {
        if (settings.lenient()) {
            return null;
        }
        throw e;
    } finally {
        setAnalyzer(oldAnalyzer);
    }
}
/**
 * Builds a prefix query, optionally running the prefix text through the
 * analyzer first (when analyze_wildcard is enabled). If analysis yields
 * multiple tokens, a SHOULD boolean of per-token prefix queries is returned.
 */
private Query getPossiblyAnalyzedPrefixQuery(String field, String termStr) throws ParseException {
    if (!analyzeWildcard) {
        return super.getPrefixQuery(field, termStr);
    }
    // get Analyzer from superclass and tokenize the term
    TokenStream source;
    try {
        source = getAnalyzer().tokenStream(field, termStr);
        source.reset();
    } catch (IOException e) {
        // Analysis failed; fall back to the raw, unanalyzed prefix.
        return super.getPrefixQuery(field, termStr);
    }
    List<String> tlist = new ArrayList<>();
    CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class);
    while (true) {
        try {
            if (!source.incrementToken()) break;
        } catch (IOException e) {
            break;
        }
        tlist.add(termAtt.toString());
    }
    try {
        source.close();
    } catch (IOException e) {
        // ignore
    }
    if (tlist.size() == 1) {
        return super.getPrefixQuery(field, tlist.get(0));
    } else {
        // build a boolean query with prefix on each one...
        List<BooleanClause> clauses = new ArrayList<>();
        for (String token : tlist) {
            clauses.add(new BooleanClause(super.getPrefixQuery(field, token), BooleanClause.Occur.SHOULD));
        }
        return getBooleanQuery(clauses, true);
        //return super.getPrefixQuery(field, termStr);
        /* this means that the analyzer used either added or consumed
         * (common for a stemmer) tokens, and we can't build a PrefixQuery */
        // throw new ParseException("Cannot build PrefixQuery with analyzer "
        //        + getAnalyzer().getClass()
        //        + (tlist.size() > 1 ? " - token(s) added" : " - token consumed"));
    }
}
/**
 * Builds a wildcard query. A bare "*" on the default/all field short-circuits
 * to match-all; "*" on a named field becomes a field-exists query. Otherwise,
 * multi-field names expand into a dis-max or boolean SHOULD query.
 */
@Override
protected Query getWildcardQuery(String field, String termStr) throws ParseException {
    if (termStr.equals("*")) {
        // we want to optimize for match all query for the "*:*", and "*" cases
        if ("*".equals(field) || Objects.equals(field, this.field)) {
            String actualField = field;
            if (actualField == null) {
                actualField = this.field;
            }
            if (actualField == null) {
                return newMatchAllDocsQuery();
            }
            if ("*".equals(actualField) || "_all".equals(actualField)) {
                return newMatchAllDocsQuery();
            }
            // effectively, we check if a field exists or not
            return fieldQueryExtensions.get(ExistsFieldQueryExtension.NAME).query(parseContext, actualField);
        }
    }
    if (lowercaseExpandedTerms) {
        termStr = termStr.toLowerCase(locale);
    }
    Collection<String> fields = extractMultiFields(field);
    if (fields != null) {
        if (fields.size() == 1) {
            return getWildcardQuerySingle(fields.iterator().next(), termStr);
        }
        if (settings.useDisMax()) {
            DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
            boolean added = false;
            for (String mField : fields) {
                Query q = getWildcardQuerySingle(mField, termStr);
                if (q != null) {
                    added = true;
                    applyBoost(mField, q);
                    disMaxQuery.add(q);
                }
            }
            if (!added) {
                return null;
            }
            return disMaxQuery;
        } else {
            List<BooleanClause> clauses = new ArrayList<>();
            for (String mField : fields) {
                Query q = getWildcardQuerySingle(mField, termStr);
                if (q != null) {
                    applyBoost(mField, q);
                    clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
                }
            }
            if (clauses.size() == 0) // happens for stopwords
                return null;
            return getBooleanQuery(clauses, true);
        }
    } else {
        return getWildcardQuerySingle(field, termStr);
    }
}
/**
 * Builds a wildcard query for one concrete field. When the field is mapped,
 * its search analyzer and indexed name are used; either way the work is done
 * by {@link #getPossiblyAnalyzedWildcardQuery}. Lenient mode drops the clause
 * on failure, and the analyzer is always restored.
 */
private Query getWildcardQuerySingle(String field, String termStr) throws ParseException {
    String indexedNameField = field;
    currentFieldType = null;
    Analyzer oldAnalyzer = getAnalyzer();
    try {
        currentFieldType = parseContext.fieldMapper(field);
        if (currentFieldType != null) {
            if (!forcedAnalyzer) {
                setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
            }
            indexedNameField = currentFieldType.names().indexName();
        }
        // Single exit point: the original had the identical call duplicated in
        // both the mapped and unmapped branches.
        return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr);
    } catch (RuntimeException e) {
        if (settings.lenient()) {
            return null;
        }
        throw e;
    } finally {
        setAnalyzer(oldAnalyzer);
    }
}
/**
 * Builds a wildcard query, optionally analyzing the non-wildcard chunks of the
 * pattern (when analyze_wildcard is enabled). The pattern is split on '?' and
 * '*'; each literal chunk is run through the analyzer and the first produced
 * term replaces it (the raw chunk is kept on analysis failure or empty output),
 * then the wildcard characters are re-inserted between chunks.
 */
private Query getPossiblyAnalyzedWildcardQuery(String field, String termStr) throws ParseException {
    if (!analyzeWildcard) {
        return super.getWildcardQuery(field, termStr);
    }
    boolean isWithinToken = (!termStr.startsWith("?") && !termStr.startsWith("*"));
    StringBuilder aggStr = new StringBuilder();
    StringBuilder tmp = new StringBuilder();
    for (int i = 0; i < termStr.length(); i++) {
        char c = termStr.charAt(i);
        if (c == '?' || c == '*') {
            if (isWithinToken) {
                appendAnalyzedChunk(field, tmp, aggStr);
                tmp.setLength(0);
            }
            isWithinToken = false;
            aggStr.append(c);
        } else {
            tmp.append(c);
            isWithinToken = true;
        }
    }
    if (isWithinToken) {
        // Trailing literal chunk after the last wildcard character.
        appendAnalyzedChunk(field, tmp, aggStr);
    }
    return super.getWildcardQuery(field, aggStr.toString());
}

/**
 * Analyzes {@code chunk} for {@code field} and appends the first produced term
 * to {@code out}; appends the raw chunk when analysis fails, produces no
 * tokens, or produces an empty term. (Extracted from two verbatim copies of
 * this logic in the original method.)
 */
private void appendAnalyzedChunk(String field, StringBuilder chunk, StringBuilder out) {
    try {
        TokenStream source = getAnalyzer().tokenStream(field, chunk.toString());
        source.reset();
        CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class);
        if (source.incrementToken()) {
            String term = termAtt.toString();
            if (term.length() == 0) {
                // no tokens, just use what we have now
                out.append(chunk);
            } else {
                out.append(term);
            }
        } else {
            // no tokens, just use what we have now
            out.append(chunk);
        }
        source.close();
    } catch (IOException e) {
        out.append(chunk);
    }
}
/**
 * Builds a regexp query over the field expression, which may expand to
 * multiple concrete fields. Multi-field expansion combines per-field queries
 * either with a DisjunctionMaxQuery or a SHOULD boolean query, depending on
 * the parser settings. Returns {@code null} when every per-field query was
 * dropped (e.g. stopwords).
 */
@Override
protected Query getRegexpQuery(String field, String termStr) throws ParseException {
    if (lowercaseExpandedTerms) {
        termStr = termStr.toLowerCase(locale);
    }
    Collection<String> fields = extractMultiFields(field);
    if (fields == null) {
        return getRegexpQuerySingle(field, termStr);
    }
    if (fields.size() == 1) {
        return getRegexpQuerySingle(fields.iterator().next(), termStr);
    }
    if (settings.useDisMax()) {
        DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
        boolean added = false;
        for (String fieldName : fields) {
            Query subQuery = getRegexpQuerySingle(fieldName, termStr);
            if (subQuery != null) {
                added = true;
                applyBoost(fieldName, subQuery);
                disMaxQuery.add(subQuery);
            }
        }
        return added ? disMaxQuery : null;
    }
    List<BooleanClause> clauses = new ArrayList<>();
    for (String fieldName : fields) {
        Query subQuery = getRegexpQuerySingle(fieldName, termStr);
        if (subQuery != null) {
            applyBoost(fieldName, subQuery);
            clauses.add(new BooleanClause(subQuery, BooleanClause.Occur.SHOULD));
        }
    }
    if (clauses.isEmpty()) {
        // happens for stopwords
        return null;
    }
    return getBooleanQuery(clauses, true);
}
/**
 * Builds a regexp query for one concrete field. Prefers the field type's
 * own regexpQuery implementation when the mapper opts in via
 * useTermQueryWithQueryString(); otherwise (or when that yields null)
 * falls back to the default parser behavior. Returns {@code null} instead
 * of propagating failures in lenient mode.
 */
private Query getRegexpQuerySingle(String field, String termStr) throws ParseException {
    currentFieldType = null;
    Analyzer previousAnalyzer = getAnalyzer();
    try {
        currentFieldType = parseContext.fieldMapper(field);
        if (currentFieldType == null) {
            return super.getRegexpQuery(field, termStr);
        }
        if (!forcedAnalyzer) {
            // honor the mapped field's own search analyzer
            setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
        }
        Query query = null;
        if (currentFieldType.useTermQueryWithQueryString()) {
            query = currentFieldType.regexpQuery(termStr, RegExp.ALL, maxDeterminizedStates, multiTermRewriteMethod, parseContext);
        }
        return query != null ? query : super.getRegexpQuery(field, termStr);
    } catch (RuntimeException e) {
        if (settings.lenient()) {
            return null; // lenient mode: swallow mapping/analysis failures
        }
        throw e;
    } finally {
        // always restore the analyzer that was active on entry
        setAnalyzer(previousAnalyzer);
    }
}
/**
 * Delegates boolean query construction to the superclass, then repairs
 * purely-negative queries (see fixNegativeQueryIfNeeded). Propagates a
 * {@code null} result unchanged.
 */
@Override
protected Query getBooleanQuery(List<BooleanClause> clauses, boolean disableCoord) throws ParseException {
    Query query = super.getBooleanQuery(clauses, disableCoord);
    return query == null ? null : fixNegativeQueryIfNeeded(query);
}
/**
 * Applies the per-field boost configured in the parser settings to the
 * given query, defaulting to 1.0 for fields without an explicit boost.
 * No-op when no boost map is configured at all.
 */
private void applyBoost(String field, Query q) {
    if (settings.boosts() == null) {
        return;
    }
    q.setBoost(settings.boosts().getOrDefault(field, 1f));
}
/**
 * Returns a copy of {@code q} with the given slop applied when the query is
 * phrase-like; all other query types are returned unchanged. PhraseQuery is
 * rebuilt through its Builder (slop cannot be set after construction) while
 * MultiPhraseQuery is mutated in place.
 */
private Query applySlop(Query q, int slop) {
    if (q instanceof PhraseQuery) {
        PhraseQuery original = (PhraseQuery) q;
        PhraseQuery.Builder rebuilt = new PhraseQuery.Builder();
        rebuilt.setSlop(slop);
        Term[] terms = original.getTerms();
        int[] positions = original.getPositions();
        for (int i = 0; i < terms.length; ++i) {
            rebuilt.add(terms[i], positions[i]);
        }
        PhraseQuery sloppy = rebuilt.build();
        // carry over the boost from the original query
        sloppy.setBoost(q.getBoost());
        return sloppy;
    }
    if (q instanceof MultiPhraseQuery) {
        ((MultiPhraseQuery) q).setSlop(slop);
    }
    return q;
}
/**
 * Resolves the field expression to the set of concrete index field names,
 * or falls back to the default fields from the parser settings when no
 * field was given. May return null (settings.fields() may be null).
 */
private Collection<String> extractMultiFields(String field) {
    return field != null ? parseContext.simpleMatchToIndexNames(field) : settings.fields();
}
/**
 * Parses the query string. A blank (empty or whitespace-only) query matches
 * no documents; clients that want all documents or a default query should
 * send one explicitly, as that behavior is simple to implement caller-side.
 */
@Override
public Query parse(String query) throws ParseException {
    if (query.trim().isEmpty()) {
        return new MatchNoDocsQuery();
    }
    return super.parse(query);
}
}
| |
/*
* This source file was generated by FireStorm/DAO.
*
* If you purchase a full license for FireStorm/DAO you can customize this header file.
*
* For more information please visit http://www.codefutures.com/products/firestorm
*/
package co.edu.sena.pruebas;
import java.math.*;
import java.util.Date;
import java.util.Collection;
import co.edu.sena.controller.dao.ServidorCorreoElectronicoDao;
import co.edu.sena.model.dao.dto.ServidorCorreoElectronico;
import co.edu.sena.controller.dao.exceptions.ServidorCorreoElectronicoDaoException;
import co.edu.sena.controller.factory.ServidorCorreoElectronicoDaoFactory;
public class ServidorCorreoElectronicoDaoSample
{
    /**
     * Entry point. Uncomment one of the sample calls below to exercise the
     * generated DAO against the configured data source.
     *
     * @param arg unused command-line arguments
     * @throws Exception kept for compatibility with the generated signature
     */
    public static void main(String[] arg) throws Exception
    {
        // Uncomment one of the lines below to test the generated code
        // findAll();
        // findWhereCorreoEquals("");
        // findWhereContraseniaEquals("");
        // findWhereSmtpHostEquals("");
        // findWhereSmtpPortEquals(0);
        // findWhereSmtpStartTlsEnableEquals(0);
        // findWhereSmtpAuthEquals(0);
        // findWhereAsuntoRecuperacionEquals("");
        // findWhereMensageRecuperacionEquals("");
    }

    /** Fetches every row and prints each one; errors go to stderr. */
    public static void findAll()
    {
        try {
            displayAll(getServidorCorreoElectronicoDao().findAll());
        }
        catch (Exception _e) {
            _e.printStackTrace();
        }
    }

    /**
     * Fetches and prints the rows whose correo column equals the argument.
     *
     * @param correo value to match
     */
    public static void findWhereCorreoEquals(String correo)
    {
        try {
            displayAll(getServidorCorreoElectronicoDao().findWhereCorreoEquals(correo));
        }
        catch (Exception _e) {
            _e.printStackTrace();
        }
    }

    /**
     * Fetches and prints the rows whose contrasenia column equals the argument.
     *
     * @param contrasenia value to match
     */
    public static void findWhereContraseniaEquals(String contrasenia)
    {
        try {
            displayAll(getServidorCorreoElectronicoDao().findWhereContraseniaEquals(contrasenia));
        }
        catch (Exception _e) {
            _e.printStackTrace();
        }
    }

    /**
     * Fetches and prints the rows whose smtpHost column equals the argument.
     *
     * @param smtpHost value to match
     */
    public static void findWhereSmtpHostEquals(String smtpHost)
    {
        try {
            displayAll(getServidorCorreoElectronicoDao().findWhereSmtpHostEquals(smtpHost));
        }
        catch (Exception _e) {
            _e.printStackTrace();
        }
    }

    /**
     * Fetches and prints the rows whose smtpPort column equals the argument.
     *
     * @param smtpPort value to match
     */
    public static void findWhereSmtpPortEquals(int smtpPort)
    {
        try {
            displayAll(getServidorCorreoElectronicoDao().findWhereSmtpPortEquals(smtpPort));
        }
        catch (Exception _e) {
            _e.printStackTrace();
        }
    }

    /**
     * Fetches and prints the rows whose smtpStartTlsEnable column equals the argument.
     *
     * @param smtpStartTlsEnable value to match
     */
    public static void findWhereSmtpStartTlsEnableEquals(short smtpStartTlsEnable)
    {
        try {
            displayAll(getServidorCorreoElectronicoDao().findWhereSmtpStartTlsEnableEquals(smtpStartTlsEnable));
        }
        catch (Exception _e) {
            _e.printStackTrace();
        }
    }

    /**
     * Fetches and prints the rows whose smtpAuth column equals the argument.
     *
     * @param smtpAuth value to match
     */
    public static void findWhereSmtpAuthEquals(short smtpAuth)
    {
        try {
            displayAll(getServidorCorreoElectronicoDao().findWhereSmtpAuthEquals(smtpAuth));
        }
        catch (Exception _e) {
            _e.printStackTrace();
        }
    }

    /**
     * Fetches and prints the rows whose asuntoRecuperacion column equals the argument.
     *
     * @param asuntoRecuperacion value to match
     */
    public static void findWhereAsuntoRecuperacionEquals(String asuntoRecuperacion)
    {
        try {
            displayAll(getServidorCorreoElectronicoDao().findWhereAsuntoRecuperacionEquals(asuntoRecuperacion));
        }
        catch (Exception _e) {
            _e.printStackTrace();
        }
    }

    /**
     * Fetches and prints the rows whose mensageRecuperacion column equals the argument.
     *
     * @param mensageRecuperacion value to match
     */
    public static void findWhereMensageRecuperacionEquals(String mensageRecuperacion)
    {
        try {
            displayAll(getServidorCorreoElectronicoDao().findWhereMensageRecuperacionEquals(mensageRecuperacion));
        }
        catch (Exception _e) {
            _e.printStackTrace();
        }
    }

    /**
     * Obtains a DAO instance from the generated factory.
     *
     * @return ServidorCorreoElectronicoDao
     */
    public static ServidorCorreoElectronicoDao getServidorCorreoElectronicoDao()
    {
        return ServidorCorreoElectronicoDaoFactory.create();
    }

    /**
     * Prints a single DTO as one comma-separated line on stdout, in column
     * order: correo, contrasenia, smtpHost, smtpPort, smtpStartTlsEnable,
     * smtpAuth, asuntoRecuperacion, mensageRecuperacion.
     *
     * @param dto the row to display
     */
    public static void display(ServidorCorreoElectronico dto)
    {
        // StringBuilder replaces the legacy synchronized StringBuffer;
        // output is byte-identical.
        StringBuilder buf = new StringBuilder();
        buf.append( dto.getCorreo() );
        buf.append( ", " );
        buf.append( dto.getContrasenia() );
        buf.append( ", " );
        buf.append( dto.getSmtpHost() );
        buf.append( ", " );
        buf.append( dto.getSmtpPort() );
        buf.append( ", " );
        buf.append( dto.getSmtpStartTlsEnable() );
        buf.append( ", " );
        buf.append( dto.getSmtpAuth() );
        buf.append( ", " );
        buf.append( dto.getAsuntoRecuperacion() );
        buf.append( ", " );
        buf.append( dto.getMensageRecuperacion() );
        System.out.println( buf.toString() );
    }

    /** Prints each DTO in the result array; shared by all finder samples above. */
    private static void displayAll(ServidorCorreoElectronico[] results)
    {
        for (ServidorCorreoElectronico dto : results) {
            display(dto);
        }
    }
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.plugins.DynamicPluginListener;
import com.intellij.ide.plugins.DynamicPlugins;
import com.intellij.ide.plugins.IdeaPluginDescriptor;
import com.intellij.ide.plugins.PluginManagerCore;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.extensions.ExtensionPoint;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.extensions.ExtensionsArea;
import com.intellij.openapi.extensions.PluginDescriptor;
import com.intellij.openapi.extensions.impl.ExtensionPointImpl;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.ShutDownTracker;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.rt.execution.junit.MapSerializerUtil;
import com.intellij.testFramework.HeavyPlatformTestCase;
import com.intellij.testFramework.LightPlatformTestCase;
import com.intellij.testFramework.TestApplicationManagerKt;
import com.intellij.util.CachedValuesManagerImpl;
import com.intellij.util.SystemProperties;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ref.GCUtil;
import junit.framework.TestCase;
import org.jetbrains.annotations.NotNull;
import org.junit.Assume;
import org.junit.FixMethodOrder;
import org.junit.runners.MethodSorters;
import java.lang.ref.WeakReference;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
/**
* This must be the last test.
*
* @author max
*/
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
@SuppressWarnings({"JUnitTestClassNamingConvention", "UseOfSystemOutOrSystemErr"})
public class _LastInSuiteTest extends TestCase {
  // Extension points opted in to the dynamic-unload leak check. Empty by
  // default, so testDynamicExtensions only runs when the
  // "intellij.test.all.dynamic.extension.points" system property is set.
  private static final Set<String> EXTENSION_POINTS_WHITE_LIST = Collections.emptySet();

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    // Enable Disposer bookkeeping so leaked disposables can be diagnosed.
    Disposer.setDebugMode(true);
  }

  @Override
  public String getName() {
    return getTestName(super.getName());
  }

  // Appends the TeamCity build configuration name (when available) so test
  // names stay unique across build configurations.
  private static String getTestName(String name) {
    String buildConf = System.getProperty("teamcity.buildConfName");
    return buildConf == null ? name : name + "[" + buildConf + "]";
  }

  /**
   * Verifies dynamic extensions are actually unloadable: collects weak
   * references to dynamic non-platform extensions, unregisters them,
   * simulates a core-plugin unload, forces GC, and fails for every extension
   * instance still strongly reachable. Reports one synthetic TeamCity test
   * (service messages on stdout) per extension point, and publishes a heap
   * dump to aid investigation.
   */
  @SuppressWarnings("CallToSystemGC")
  public void testDynamicExtensions() {
    Assume.assumeTrue(!EXTENSION_POINTS_WHITE_LIST.isEmpty() ||
                      SystemProperties.getBooleanProperty("intellij.test.all.dynamic.extension.points", false));
    Map<ExtensionPoint<?>, Collection<WeakReference<Object>>> extensions = collectDynamicNonPlatformExtensions();
    // Order matters: unregister extensions, start plugin unload, then drop
    // caches that may still strongly reference extension instances.
    unloadExtensionPoints(extensions.keySet());
    startCorePluginUnload();
    disposePluginDisposables();
    ProjectManager pm = ProjectManager.getInstanceIfCreated();
    if (pm != null) {
      for (Project project : pm.getOpenProjects()) {
        // CachedValues may retain extension instances; clear before GC.
        ((CachedValuesManagerImpl) CachedValuesManager.getManager(project)).clearCachedValues();
      }
    }
    finishCorePluginUnload();
    // Encourage collection of soft/weak-reachable extension objects.
    GCUtil.tryGcSoftlyReachableObjects();
    System.gc();
    System.gc();
    String heapDump = HeavyPlatformTestCase.publishHeapDump("dynamicExtension");
    AtomicBoolean failed = new AtomicBoolean(false);
    extensions.forEach((ep, references) -> {
      String testName = escape(getTestName("Dynamic EP unloading " + ep.getName()));
      System.out.printf("##teamcity[testStarted name='%s']%n", testName);
      System.out.flush();
      // Any reference still resolvable after GC is a leaked extension.
      List<Object> alive = ContainerUtil.mapNotNull(references, WeakReference::get);
      if (!alive.isEmpty()) {
        String aliveExtensions = StringUtil.join(alive, o -> o +" ("+o.getClass()+")", "\n");
        System.out.printf("##teamcity[%s name='%s' message='%s']%n", MapSerializerUtil.TEST_FAILED, testName,
                          escape("Not unloaded extensions:\n" + aliveExtensions + "\n\n" + "See testDynamicExtensions output to find a heapDump"));
        System.out.flush();
        failed.set(true);
      }
      else {
        System.out.printf("##teamcity[testFinished name='%s']%n", testName);
        System.out.flush();
      }
    });
    if (failed.get()) {
      fail("Some of dynamic extensions have not been unloaded. See individual tests for details. Heap dump: " + heapDump);
    }
  }

  // Disposes every per-plugin disposable registered with DynamicPlugins.
  private static void disposePluginDisposables() {
    DynamicPlugins.INSTANCE.getPluginDisposables().forEach((plugin, disposable) -> Disposer.dispose(disposable));
  }

  // Escapes a string for embedding in a TeamCity service message.
  @NotNull
  private static String escape(String s) {
    return MapSerializerUtil.escapeStr(s, MapSerializerUtil.STD_ESCAPER);
  }

  // Unregisters all extensions of the given points on the EDT; the
  // (a, b) -> false predicate removes every extension.
  private static void unloadExtensionPoints(@NotNull Set<ExtensionPoint<?>> extensionPoints) {
    for (ExtensionPoint<?> ep : extensionPoints) {
      ApplicationManager.getApplication().invokeAndWait(() -> {
        ep.unregisterExtensions((a, b) -> false, false);
      });
    }
  }

  // Fires the beforePluginUnload event for the core plugin under a write action.
  private static void startCorePluginUnload() {
    IdeaPluginDescriptor corePlugin = PluginManagerCore.getPlugin(PluginManagerCore.CORE_ID);
    assert corePlugin != null;
    WriteAction.runAndWait(() -> {
      ApplicationManager.getApplication().getMessageBus().syncPublisher(DynamicPluginListener.TOPIC)
        .beforePluginUnload(corePlugin, false);
    });
  }

  // Fires the pluginUnloaded event for the core plugin under a write action.
  private static void finishCorePluginUnload() {
    IdeaPluginDescriptor corePlugin = PluginManagerCore.getPlugin(PluginManagerCore.CORE_ID);
    assert corePlugin != null;
    WriteAction.runAndWait(() -> {
      ApplicationManager.getApplication().getMessageBus().syncPublisher(DynamicPluginListener.TOPIC)
        .pluginUnloaded(corePlugin, false);
    });
  }

  /**
   * Gathers weak references to all dynamic, non-platform extensions from the
   * root extension area and every open project's area. Honors the white list
   * unless "intellij.test.all.dynamic.extension.points" is set.
   */
  @NotNull
  private static Map<ExtensionPoint<?>, Collection<WeakReference<Object>>> collectDynamicNonPlatformExtensions() {
    boolean useWhiteList = !SystemProperties.getBooleanProperty("intellij.test.all.dynamic.extension.points", false);
    ExtensionsArea area = Extensions.getRootArea();
    Map<ExtensionPoint<?>, Collection<WeakReference<Object>>> extensions = new HashMap<>();
    ProjectManager pm = ProjectManager.getInstanceIfCreated();
    if (pm != null) {
      for (Project project : pm.getOpenProjects()) {
        collectForArea(project.getExtensionArea(), useWhiteList, extensions);
      }
    }
    collectForArea(area, useWhiteList, extensions);
    return extensions;
  }

  // Records weak references to dynamic, non-platform extensions of one area
  // into the given map; null areas are ignored.
  private static void collectForArea(ExtensionsArea area,
                                     boolean useWhiteList,
                                     Map<ExtensionPoint<?>, Collection<WeakReference<Object>>> extensions) {
    if (area == null) return;
    for (ExtensionPoint<?> ep : area.getExtensionPoints()) {
      if (!ep.isDynamic()) continue;
      if (useWhiteList && !EXTENSION_POINTS_WHITE_LIST.contains(ep.getName())) continue;
      extensions.put(ep, ep.extensions()
        .filter(e -> !isPlatformExtension(ep, e))
        .map(WeakReference<Object>::new)
        .collect(Collectors.toList()));
    }
  }

  // An extension is "platform" when it was contributed by the core plugin.
  private static boolean isPlatformExtension(ExtensionPoint<?> ep, Object extension) {
    //noinspection unchecked
    PluginDescriptor plugin = ((ExtensionPointImpl<Object>)ep).getPluginDescriptor(extension);
    return plugin != null && plugin.getPluginId() == PluginManagerCore.CORE_ID;
  }

  /**
   * Disposes the application and checks for leaked projects. In GUI test
   * mode it instead flushes the event queue, exits the application, and
   * waits up to 100 seconds for shutdown.
   */
  public void testProjectLeak() {
    if (Boolean.getBoolean("idea.test.guimode")) {
      Application application = ApplicationManager.getApplication();
      application.invokeAndWait(() -> {
        IdeEventQueue.getInstance().flushQueue();
        application.exit(true, true, false);
      });
      ShutDownTracker.getInstance().waitFor(100, TimeUnit.SECONDS);
      return;
    }
    TestApplicationManagerKt.disposeApplicationAndCheckForLeaks();
  }

  /**
   * Reports total suite wall-clock time (since _FirstInSuiteTest) as a
   * TeamCity build statistic, plus per-test execution statistics.
   */
  public void testStatistics() {
    long started = _FirstInSuiteTest.getSuiteStartTime();
    if (started != 0) {
      long testSuiteDuration = System.nanoTime() - started;
      System.out.println(String.format("##teamcity[buildStatisticValue key='ideaTests.totalTimeMs' value='%d']", testSuiteDuration / 1000000));
    }
    LightPlatformTestCase.reportTestExecutionStatistics();
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices.recovery;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.ReleasableBytesReference;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.concurrent.AbstractRefCounted;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.store.StoreFileMetadata;
import org.elasticsearch.transport.Transports;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Writes recovered file chunks into a {@link Store}. Chunks for a given file
 * may arrive out of order; they are buffered per file (see FileChunkWriter)
 * and written strictly sequentially by position. Data lands in temporary
 * files (prefixed with {@code tempFilePrefix}) which are verified, synced and
 * later renamed to their real names via {@link #renameAllTempFiles()}.
 * Lifetime is managed by ref-counting: {@link #close()} drops the initial
 * reference and {@code closeInternal()} runs once all references are gone.
 */
public class MultiFileWriter extends AbstractRefCounted implements Releasable {
  /**
   * @param store          target store the recovered files are written into
   * @param indexState     recovery progress tracker, updated as bytes are written
   * @param tempFilePrefix prefix prepended to each file name while it is being written
   * @param logger         logger for close/cleanup tracing
   * @param ensureOpen     invoked before mutating operations; expected to throw
   *                       when the owning recovery has been closed (NOTE(review):
   *                       contract inferred from usage — confirm with callers)
   */
  public MultiFileWriter(Store store, RecoveryState.Index indexState, String tempFilePrefix, Logger logger, Runnable ensureOpen) {
    super("multi_file_writer");
    this.store = store;
    this.indexState = indexState;
    this.tempFilePrefix = tempFilePrefix;
    this.logger = logger;
    this.ensureOpen = ensureOpen;
  }

  private final Runnable ensureOpen;
  // Guards close() so the initial reference is only released once.
  private final AtomicBoolean closed = new AtomicBoolean(false);
  private final Logger logger;
  private final Store store;
  private final RecoveryState.Index indexState;
  private final String tempFilePrefix;
  // Open outputs keyed by ORIGINAL file name; removed once a file completes.
  private final ConcurrentMap<String, IndexOutput> openIndexOutputs = ConcurrentCollections.newConcurrentMap();
  // One writer per file name, buffering out-of-order chunks.
  private final ConcurrentMap<String, FileChunkWriter> fileChunkWriters = ConcurrentCollections.newConcurrentMap();
  // temp file name -> original file name, used for renaming and cleanup.
  final Map<String, String> tempFileNames = ConcurrentCollections.newConcurrentMap();

  /**
   * Queues (and possibly writes) one chunk of a file. May block doing disk
   * I/O, hence the assertion that we are not on a transport thread. The
   * writer is ref-counted for the duration of the call.
   */
  public void writeFileChunk(StoreFileMetadata fileMetadata, long position, ReleasableBytesReference content, boolean lastChunk)
      throws IOException {
    assert Transports.assertNotTransportThread("multi_file_writer");
    final FileChunkWriter writer = fileChunkWriters.computeIfAbsent(fileMetadata.name(), name -> new FileChunkWriter());
    incRef();
    try {
      writer.writeChunk(new FileChunk(fileMetadata, content, position, lastChunk));
    } finally {
      decRef();
    }
  }

  /** Get a temporary name for the provided file name. */
  String getTempNameForFile(String origFile) {
    return tempFilePrefix + origFile;
  }

  public IndexOutput getOpenIndexOutput(String key) {
    ensureOpen.run();
    return openIndexOutputs.get(key);
  }

  /** remove and {@link IndexOutput} for a given file. It is the caller's responsibility to close it */
  public IndexOutput removeOpenIndexOutputs(String name) {
    ensureOpen.run();
    return openIndexOutputs.remove(name);
  }

  /**
   * Creates an {@link IndexOutput} for the given file name. Note that the
   * IndexOutput actually point at a temporary file.
   * <p>
   * Note: You can use {@link #getOpenIndexOutput(String)} with the same filename to retrieve the same IndexOutput
   * at a later stage
   */
  public IndexOutput openAndPutIndexOutput(String fileName, StoreFileMetadata metadata, Store store) throws IOException {
    ensureOpen.run();
    String tempFileName = getTempNameForFile(fileName);
    if (tempFileName.contains // (kept verbatim below; no code change)
        == false ? false : false) { } // <-- not present; see original lines
    if (tempFileNames.containsKey(tempFileName)) {
      throw new IllegalStateException("output for file [" + fileName + "] has already been created");
    }
    // add first, before it's created
    tempFileNames.put(tempFileName, fileName);
    IndexOutput indexOutput = store.createVerifyingOutput(tempFileName, metadata, IOContext.DEFAULT);
    openIndexOutputs.put(fileName, indexOutput);
    return indexOutput;
  }

  /**
   * Writes one in-order chunk to disk: opens the output at position 0,
   * appends all pages of the chunk, updates recovery progress, and on the
   * final chunk verifies the checksum, closes and fsyncs the temp file.
   */
  private void innerWriteFileChunk(StoreFileMetadata fileMetadata, long position,
                                   BytesReference content, boolean lastChunk) throws IOException {
    final String name = fileMetadata.name();
    IndexOutput indexOutput;
    if (position == 0) {
      indexOutput = openAndPutIndexOutput(name, fileMetadata, store);
    } else {
      indexOutput = getOpenIndexOutput(name);
    }
    assert indexOutput.getFilePointer() == position : "file-pointer " + indexOutput.getFilePointer() + " != " + position;
    BytesRefIterator iterator = content.iterator();
    BytesRef scratch;
    while((scratch = iterator.next()) != null) { // we iterate over all pages - this is a 0-copy for all core impls
      indexOutput.writeBytes(scratch.bytes, scratch.offset, scratch.length);
    }
    indexState.addRecoveredBytesToFile(name, content.length());
    if (indexOutput.getFilePointer() >= fileMetadata.length() || lastChunk) {
      try {
        Store.verify(indexOutput);
      } finally {
        // we are done
        indexOutput.close();
      }
      final String temporaryFileName = getTempNameForFile(name);
      assert Arrays.asList(store.directory().listAll()).contains(temporaryFileName) :
        "expected: [" + temporaryFileName + "] in " + Arrays.toString(store.directory().listAll());
      store.directory().sync(Collections.singleton(temporaryFileName));
      IndexOutput remove = removeOpenIndexOutputs(name);
      assert remove == null || remove == indexOutput; // remove maybe null if we got finished
    }
  }

  /** Releases the initial reference exactly once; cleanup runs in closeInternal(). */
  @Override
  public void close() {
    if (closed.compareAndSet(false, true)) {
      decRef();
    }
  }

  /** Runs when the last reference is released: closes outputs and deletes temp files. */
  @Override
  protected void closeInternal() {
    Releasables.close(fileChunkWriters.values());
    fileChunkWriters.clear();
    // clean open index outputs
    Iterator<Map.Entry<String, IndexOutput>> iterator = openIndexOutputs.entrySet().iterator();
    while (iterator.hasNext()) {
      Map.Entry<String, IndexOutput> entry = iterator.next();
      logger.trace("closing IndexOutput file [{}]", entry.getValue());
      try {
        entry.getValue().close();
      } catch (Exception e) {
        logger.debug(() -> new ParameterizedMessage("error while closing recovery output [{}]", entry.getValue()), e);
      }
      iterator.remove();
    }
    if (Strings.hasText(tempFilePrefix)) {
      // trash temporary files
      for (String file : tempFileNames.keySet()) {
        logger.trace("cleaning temporary file [{}]", file);
        store.deleteQuiet(file);
      }
    }
  }

  /** renames all temporary files to their true name, potentially overriding existing files */
  public void renameAllTempFiles() throws IOException {
    ensureOpen.run();
    store.renameTempFilesSafe(tempFileNames);
  }

  /** One received chunk; retains its buffer until written (released in close()). */
  private static final class FileChunk implements Releasable {
    final StoreFileMetadata md;
    final ReleasableBytesReference content;
    final long position;
    final boolean lastChunk;
    FileChunk(StoreFileMetadata md, ReleasableBytesReference content, long position, boolean lastChunk) {
      this.md = md;
      // retain so the network buffer survives until this chunk is written
      this.content = content.retain();
      this.position = position;
      this.lastChunk = lastChunk;
    }
    @Override
    public void close() {
      content.decRef();
    }
  }

  private final class FileChunkWriter implements Releasable {
    // chunks can be delivered out of order, we need to buffer chunks if there's a gap between them.
    final PriorityQueue<FileChunk> pendingChunks = new PriorityQueue<>(Comparator.comparing(fc -> fc.position));
    // position up to which the file has been written contiguously
    long lastPosition = 0;
    void writeChunk(FileChunk newChunk) throws IOException {
      synchronized (this) {
        pendingChunks.add(newChunk);
      }
      // Drain every chunk that is now contiguous with what's on disk. The
      // lock is held only for queue access, never across the disk write.
      while (true) {
        final FileChunk chunk;
        synchronized (this) {
          chunk = pendingChunks.peek();
          if (chunk == null || chunk.position != lastPosition) {
            return;
          }
          pendingChunks.remove();
        }
        try (chunk) {
          innerWriteFileChunk(chunk.md, chunk.position, chunk.content, chunk.lastChunk);
          synchronized (this) {
            assert lastPosition == chunk.position : "last_position " + lastPosition + " != chunk_position " + chunk.position;
            lastPosition += chunk.content.length();
            if (chunk.lastChunk) {
              assert pendingChunks.isEmpty() : "still have pending chunks [" + pendingChunks + "]";
              fileChunkWriters.remove(chunk.md.name());
              assert fileChunkWriters.containsValue(this) == false : "chunk writer [" + newChunk.md + "] was not removed";
            }
          }
        }
      }
    }
    @Override
    public synchronized void close() {
      // Releases any chunks still buffered (their retained byte buffers).
      Releasables.close(pendingChunks);
    }
  }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.util;
import com.intellij.CommonBundle;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.icons.AllIcons;
import com.intellij.ide.*;
import com.intellij.ide.actions.ViewStructureAction;
import com.intellij.ide.dnd.aware.DnDAwareTree;
import com.intellij.ide.structureView.ModelListener;
import com.intellij.ide.structureView.StructureView;
import com.intellij.ide.structureView.StructureViewModel;
import com.intellij.ide.structureView.impl.common.PsiTreeElementBase;
import com.intellij.ide.structureView.newStructureView.StructureViewComponent;
import com.intellij.ide.structureView.newStructureView.TreeActionWrapper;
import com.intellij.ide.structureView.newStructureView.TreeActionsOwner;
import com.intellij.ide.structureView.newStructureView.TreeModelWrapper;
import com.intellij.ide.ui.UISettingsListener;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.ide.util.treeView.NodeRenderer;
import com.intellij.ide.util.treeView.smartTree.*;
import com.intellij.internal.statistic.collectors.fus.actions.persistence.ActionsEventLogGroup;
import com.intellij.internal.statistic.eventLog.events.EventFields;
import com.intellij.lang.LangBundle;
import com.intellij.lang.Language;
import com.intellij.navigation.LocationPresentation;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.fileEditor.TextEditor;
import com.intellij.openapi.fileEditor.ex.IdeDocumentHistory;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.LanguageFileType;
import com.intellij.openapi.ide.CopyPasteManager;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.*;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.pom.Navigatable;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.StubBasedPsiElement;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.ui.*;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.ui.popup.AbstractPopup;
import com.intellij.ui.popup.HintUpdateSupply;
import com.intellij.ui.popup.PopupUpdateProcessor;
import com.intellij.ui.scale.JBUIScale;
import com.intellij.ui.speedSearch.ElementFilter;
import com.intellij.ui.tree.AsyncTreeModel;
import com.intellij.ui.tree.StructureTreeModel;
import com.intellij.ui.tree.TreeVisitor;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.ui.treeStructure.filtered.FilteringTreeStructure;
import com.intellij.util.*;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.JBIterable;
import com.intellij.util.text.TextRangeUtil;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.TextTransferable;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.tree.TreeUtil;
import com.intellij.xml.util.XmlStringUtil;
import org.jetbrains.annotations.*;
import org.jetbrains.concurrency.AsyncPromise;
import org.jetbrains.concurrency.Promise;
import org.jetbrains.concurrency.Promises;
import javax.swing.*;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.awt.event.*;
import java.util.List;
import java.util.*;
import java.util.function.BiPredicate;
/**
* @author Konstantin Bulenkov
*/
public class FileStructurePopup implements Disposable, TreeActionsOwner {
private static final Logger LOG = Logger.getInstance(FileStructurePopup.class);
private static final @NonNls String NARROW_DOWN_PROPERTY_KEY = "FileStructurePopup.narrowDown";
private final Project myProject;
private final FileEditor myFileEditor;
private final StructureViewModel myTreeModelWrapper;
private final StructureViewModel myTreeModel;
private final TreeStructureActionsOwner myTreeActionsOwner;
private JBPopup myPopup;
private @NlsContexts.PopupTitle String myTitle;
private final Tree myTree;
private final SmartTreeStructure myTreeStructure;
private final FilteringTreeStructure myFilteringStructure;
private final AsyncTreeModel myAsyncTreeModel;
private final StructureTreeModel myStructureTreeModel;
private final TreeSpeedSearch mySpeedSearch;
private final Object myInitialElement;
private final Map<Class, JBCheckBox> myCheckBoxes = new HashMap<>();
private final List<JBCheckBox> myAutoClicked = new ArrayList<>();
private String myTestSearchFilter;
private final ActionCallback myTreeHasBuilt = new ActionCallback();
private final List<Pair<String, JBCheckBox>> myTriggeredCheckboxes = new ArrayList<>();
private final TreeExpander myTreeExpander;
private final CopyPasteDelegator myCopyPasteDelegator;
private boolean myCanClose = true;
private boolean myDisposed;
/**
* @noinspection unused
* @deprecated use {@link #FileStructurePopup(Project, FileEditor, StructureViewModel)}
*/
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2021.3")
public FileStructurePopup(@NotNull Project project,
@NotNull FileEditor fileEditor,
@NotNull StructureView structureView,
boolean applySortAndFilter) {
this(project, fileEditor, ViewStructureAction.createStructureViewModel(project, fileEditor, structureView));
Disposer.register(this, structureView);
}
  /**
   * Builds the popup's tree stack (model -> filtering structure -> structure tree model -> async model),
   * wires speed search, copy/paste support and model listeners. The popup window itself is created
   * later in {@link #show()}.
   *
   * @param project    current project
   * @param fileEditor editor whose file structure is shown
   * @param treeModel  structure view model for that editor's file
   */
  public FileStructurePopup(@NotNull Project project,
                            @NotNull FileEditor fileEditor,
                            @NotNull StructureViewModel treeModel) {
    myProject = project;
    myFileEditor = fileEditor;
    myTreeModel = treeModel;
    //Stop code analyzer to speedup EDT
    DaemonCodeAnalyzer.getInstance(myProject).disableUpdateByTimer(this);
    // Buffer keystrokes until the tree is built, so early typing is not lost.
    IdeFocusManager.getInstance(myProject).typeAheadUntil(myTreeHasBuilt, "FileStructurePopup");
    myTreeActionsOwner = new TreeStructureActionsOwner(myTreeModel);
    myTreeActionsOwner.setActionIncluded(Sorter.ALPHA_SORTER, true);
    myTreeModelWrapper = new TreeModelWrapper(myTreeModel, myTreeActionsOwner);
    Disposer.register(this, myTreeModelWrapper);
    myTreeStructure = new SmartTreeStructure(project, myTreeModelWrapper) {
      @Override
      public void rebuildTree() {
        // Outside tests the popup may already be gone; rebuilding then is pointless.
        if (!ApplicationManager.getApplication().isUnitTestMode() && myPopup.isDisposed()) {
          return;
        }
        ProgressManager.getInstance().computePrioritized(() -> {
          super.rebuildTree();
          myFilteringStructure.rebuild();
          return null;
        });
      }
      @Override
      public boolean isToBuildChildrenInBackground(@NotNull Object element) {
        // Only the root's children are expensive enough to warrant a background build.
        return getRootElement() == element;
      }
      @NotNull
      @Override
      protected TreeElementWrapper createTree() {
        return StructureViewComponent.createWrapper(myProject, myModel.getRoot(), myModel);
      }
      @NonNls
      @Override
      public String toString() {
        return "structure view tree structure(model=" + myTreeModelWrapper + ")";
      }
    };
    FileStructurePopupFilter filter = new FileStructurePopupFilter();
    myFilteringStructure = new FilteringTreeStructure(filter, myTreeStructure, false);
    myStructureTreeModel = new StructureTreeModel<>(myFilteringStructure, this);
    myAsyncTreeModel = new AsyncTreeModel(myStructureTreeModel, this);
    myAsyncTreeModel.setRootImmediately(myStructureTreeModel.getRootImmediately());
    myTree = new MyTree(myAsyncTreeModel);
    StructureViewComponent.registerAutoExpandListener(myTree, myTreeModel);
    ModelListener modelListener = () -> rebuild(false);
    myTreeModel.addModelListener(modelListener);
    Disposer.register(this, () -> myTreeModel.removeModelListener(modelListener));
    myTree.setCellRenderer(new NodeRenderer());
    // Any UI settings change (fonts, icons, ...) requires a full rebuild.
    myProject.getMessageBus().connect(this).subscribe(UISettingsListener.TOPIC, o -> rebuild(false));
    myTree.setTransferHandler(new TransferHandler() {
      @Override
      public boolean importData(@NotNull TransferSupport support) {
        // Pasting text into the popup feeds it to the speed-search field.
        String s = CopyPasteManager.getInstance().getContents(DataFlavor.stringFlavor);
        if (s != null && !mySpeedSearch.isPopupActive()) {
          mySpeedSearch.showPopup(s);
          return true;
        }
        return false;
      }
      @Nullable
      @Override
      protected Transferable createTransferable(JComponent component) {
        // Copy selected nodes as text; nested selections are collapsed into their selected ancestor.
        JBIterable<Pair<FilteringTreeStructure.FilteringNode, PsiElement>> pairs = JBIterable.of(myTree.getSelectionPaths())
          .filterMap(TreeUtil::getLastUserObject)
          .filter(FilteringTreeStructure.FilteringNode.class)
          .filterMap(o -> o.getDelegate() instanceof PsiElement ? Pair.create(o, (PsiElement)o.getDelegate()) : null)
          .collect();
        if (pairs.isEmpty()) return null;
        Set<PsiElement> psiSelection = pairs.map(Functions.pairSecond()).toSet();
        String text = StringUtil.join(pairs, pair -> {
          PsiElement psi = pair.second;
          String defaultPresentation = pair.first.getPresentation().getPresentableText();
          if (psi == null) return defaultPresentation;
          // Skip elements whose ancestor is also selected — the ancestor's text already covers them.
          for (PsiElement p = psi.getParent(); p != null; p = p.getParent()) {
            if (psiSelection.contains(p)) return null;
          }
          return ObjectUtils.chooseNotNull(psi.getText(), defaultPresentation);
        }, "\n");
        String htmlText = "<body>\n" + text + "\n</body>";
        return new TextTransferable(XmlStringUtil.wrapInHtml(htmlText), text);
      }
      @Override
      public int getSourceActions(JComponent component) {
        return COPY;
      }
    });
    mySpeedSearch = new MyTreeSpeedSearch();
    mySpeedSearch.setComparator(new SpeedSearchComparator(false, true, " ()"));
    myTreeExpander = new DefaultTreeExpander(myTree);
    myCopyPasteDelegator = new CopyPasteDelegator(myProject, myTree);
    myInitialElement = myTreeModel.getCurrentEditorElement();
    TreeUtil.installActions(myTree);
  }
  /**
   * Creates and shows the popup centered in the current window, then rebuilds the tree and
   * selects the element that was under the editor caret.
   */
  public void show() {
    JComponent panel = createCenterPanel();
    myTree.addTreeSelectionListener(__ -> {
      if (myPopup.isVisible()) {
        // Let an attached updater (e.g. quick-doc) follow the tree selection.
        PopupUpdateProcessor updateProcessor = myPopup.getUserData(PopupUpdateProcessor.class);
        if (updateProcessor != null) {
          AbstractTreeNode node = getSelectedNode();
          updateProcessor.updatePopup(node);
        }
      }
    });
    myPopup = JBPopupFactory.getInstance().createComponentPopupBuilder(panel, myTree)
      .setTitle(myTitle)
      .setResizable(true)
      .setModalContext(false)
      .setFocusable(true)
      .setRequestFocus(true)
      .setMovable(true)
      .setBelongsToGlobalPopupStack(true)
      //.setCancelOnClickOutside(false) //for debug and snapshots
      .setCancelOnOtherWindowOpen(true)
      .setCancelKeyEnabled(false)
      .setDimensionServiceKey(myProject, getDimensionServiceKey(), true)
      .setCancelCallback(() -> myCanClose)
      .setNormalWindowLevel(true)
      .createPopup();
    Disposer.register(myPopup, this);
    Disposer.register(myPopup, () -> {
      // If the popup closes before the tree finished building, release type-ahead.
      if (!myTreeHasBuilt.isDone()) {
        myTreeHasBuilt.setRejected();
      }
    });
    myTree.getEmptyText().setText(CommonBundle.getLoadingTreeNodeText());
    myPopup.showCenteredInCurrentWindow(myProject);
    ((AbstractPopup)myPopup).setShowHints(true);
    IdeFocusManager.getInstance(myProject).requestFocus(myTree, true);
    rebuildAndSelect(false, myInitialElement).onProcessed(path -> UIUtil.invokeLaterIfNeeded(() -> {
      TreeUtil.ensureSelection(myTree);
      myTreeHasBuilt.setDone();
      installUpdater();
    }));
  }
  /**
   * Polls the speed-search prefix every 300 ms and refilters the tree when it changes.
   * When a filter leaves the tree empty, one disabled checkbox is auto-clicked to widen
   * the search (recorded in myTriggeredCheckboxes so backspace can undo it).
   */
  private void installUpdater() {
    if (ApplicationManager.getApplication().isUnitTestMode() || myPopup.isDisposed()) {
      return;
    }
    // Alarm is disposed with the popup, which stops the polling loop.
    Alarm alarm = new Alarm(Alarm.ThreadToUse.SWING_THREAD, myPopup);
    alarm.addRequest(new Runnable() {
      String filter = "";
      @Override
      public void run() {
        alarm.cancelAllRequests();
        String prefix = mySpeedSearch.getEnteredPrefix();
        myTree.getEmptyText().setText(StringUtil.isEmpty(prefix) ? LangBundle.message("status.text.structure.empty")
                                                                 : "'" + prefix + "' " +
                                                                   LangBundle.message("status.text.structure.empty.not.found"));
        if (prefix == null) prefix = "";
        if (!filter.equals(prefix)) {
          boolean isBackspace = prefix.length() < filter.length();
          filter = prefix;
          rebuild(true).onProcessed(ignore -> UIUtil.invokeLaterIfNeeded(() -> {
            if (isDisposed()) return;
            TreeUtil.promiseExpandAll(myTree);
            // Backspace may undo previously auto-clicked checkboxes instead of widening further.
            if (isBackspace && handleBackspace(filter)) {
              return;
            }
            if (myFilteringStructure.getRootElement().getChildren().length == 0) {
              // Nothing matched: enable the first unselected checkbox and remember it.
              for (JBCheckBox box : myCheckBoxes.values()) {
                if (!box.isSelected()) {
                  myAutoClicked.add(box);
                  myTriggeredCheckboxes.add(0, Pair.create(filter, box));
                  box.doClick();
                  filter = "";
                  break;
                }
              }
            }
          }));
        }
        // Re-schedule self until the alarm (and thus the popup) is disposed.
        if (!alarm.isDisposed()) {
          alarm.addRequest(this, 300);
        }
      }
    }, 300);
  }
private boolean handleBackspace(String filter) {
boolean clicked = false;
Iterator<Pair<String, JBCheckBox>> iterator = myTriggeredCheckboxes.iterator();
while (iterator.hasNext()) {
Pair<String, JBCheckBox> next = iterator.next();
if (next.getFirst().length() < filter.length()) break;
iterator.remove();
next.getSecond().doClick();
clicked = true;
}
return clicked;
}
  /**
   * Asynchronously selects the tree node representing {@code element}, expanding its path.
   * First pass may skip subtrees whose PSI is unrelated (optimization); if that finds nothing,
   * a second pass visits everything. Falls back to the deepest ancestor path seen, optionally
   * extended by the child closest to the element's text range.
   *
   * @return promise resolved with the selected path, or rejected when nothing matched
   */
  @NotNull
  public Promise<TreePath> select(Object element) {
    int[] stage = {1, 0}; // 1 - first pass, 2 - optimization applied, 3 - retry w/o optimization
    TreePath[] deepestPath = {null};
    TreeVisitor visitor = path -> {
      Object last = path.getLastPathComponent();
      Object userObject = StructureViewComponent.unwrapNavigatable(last);
      Object value = StructureViewComponent.unwrapValue(last);
      if (Comparing.equal(value, element) ||
          userObject instanceof AbstractTreeNode && ((AbstractTreeNode)userObject).canRepresent(element)) {
        return TreeVisitor.Action.INTERRUPT;
      }
      if (value instanceof PsiElement && element instanceof PsiElement) {
        if (PsiTreeUtil.isAncestor((PsiElement)value, (PsiElement)element, true)) {
          // Remember the deepest PSI ancestor path as a fallback selection.
          int count = path.getPathCount();
          if (stage[1] == 0 || stage[1] < count) {
            stage[1] = count;
            deepestPath[0] = path;
          }
        }
        else if (stage[0] != 3) {
          // Unrelated PSI subtree: skip it unless we are in the no-optimization retry.
          stage[0] = 2;
          return TreeVisitor.Action.SKIP_CHILDREN;
        }
      }
      return TreeVisitor.Action.CONTINUE;
    };
    Function<TreePath, Promise<TreePath>> action = path -> {
      myTree.expandPath(path);
      TreeUtil.selectPath(myTree, path);
      TreeUtil.ensureSelection(myTree);
      return Promises.resolvedPromise(path);
    };
    Function<TreePath, Promise<TreePath>> fallback = new Function<>() {
      @Override
      public Promise<TreePath> fun(TreePath path) {
        if (path == null && stage[0] == 2) {
          // Some structure views merge unrelated psi elements into a structure node (MarkdownStructureViewModel).
          // So turn off the isAncestor() optimization and retry once.
          stage[0] = 3;
          return myAsyncTreeModel.accept(visitor).thenAsync(this);
        }
        else {
          TreePath adjusted = path == null ? deepestPath[0] : path;
          if (path == null && adjusted != null && element instanceof PsiElement) {
            Object minChild = findClosestPsiElement((PsiElement)element, adjusted, myAsyncTreeModel);
            if (minChild != null) adjusted = adjusted.pathByAddingChild(minChild);
          }
          return adjusted == null ? Promises.rejectedPromise() : action.fun(adjusted);
        }
      }
    };
    return myAsyncTreeModel
      .accept(visitor)
      .thenAsync(fallback);
  }
  /**
   * Test helper: rebuilds the tree and then forces an update() on every visible node.
   *
   * @return promise resolved once both the rebuild and the node updates completed
   */
  @TestOnly
  public AsyncPromise<Void> rebuildAndUpdate() {
    AsyncPromise<Void> result = new AsyncPromise<>();
    TreeVisitor visitor = path -> {
      AbstractTreeNode node = TreeUtil.getLastUserObject(AbstractTreeNode.class, path);
      if (node != null) node.update();
      return TreeVisitor.Action.CONTINUE;
    };
    rebuild(false).onProcessed(ignore1 -> myAsyncTreeModel.accept(visitor).onProcessed(ignore2 -> result.setResult(null)));
    return result;
  }
  /** @return true once {@link #dispose()} has run */
  public boolean isDisposed() {
    return myDisposed;
  }
  @Override
  public void dispose() {
    myDisposed = true;
  }
  /** Whether typing should narrow the tree down (user-configurable, defaults to true). */
  private static boolean isShouldNarrowDown() {
    return PropertiesComponent.getInstance().getBoolean(NARROW_DOWN_PROPERTY_KEY, true);
  }
  /** Key under which the popup's size/location are persisted. */
  @NonNls
  protected static String getDimensionServiceKey() {
    return "StructurePopup";
  }
  /**
   * Best-effort PSI element for the current editor position: the model's current element
   * if it is PSI, otherwise the element at the caret offset of a text editor.
   *
   * @param psiFile file to search in when falling back to the caret offset; may be null
   */
  @Nullable
  public PsiElement getCurrentElement(@Nullable final PsiFile psiFile) {
    // Commit documents first so PSI reflects the latest editor text.
    PsiDocumentManager.getInstance(myProject).commitAllDocuments();
    Object elementAtCursor = myTreeModelWrapper.getCurrentEditorElement();
    if (elementAtCursor instanceof PsiElement) {
      return (PsiElement)elementAtCursor;
    }
    if (psiFile != null && myFileEditor instanceof TextEditor) {
      return psiFile.getViewProvider().findElementAt(((TextEditor)myFileEditor).getEditor().getCaretModel().getOffset());
    }
    return null;
  }
  /**
   * Assembles the popup content: a top row of filter/provider checkboxes plus a settings
   * button, the structure tree in a scroll pane, shortcuts (Enter/F4 to navigate, Escape
   * to close or hide speed search), and a data provider for platform actions.
   */
  public JComponent createCenterPanel() {
    List<FileStructureFilter> fileStructureFilters = new ArrayList<>();
    List<FileStructureNodeProvider> fileStructureNodeProviders = new ArrayList<>();
    if (myTreeActionsOwner != null) {
      for (Filter filter : myTreeModel.getFilters()) {
        if (filter instanceof FileStructureFilter) {
          FileStructureFilter fsFilter = (FileStructureFilter)filter;
          myTreeActionsOwner.setActionIncluded(fsFilter, true);
          fileStructureFilters.add(fsFilter);
        }
      }
      if (myTreeModel instanceof ProvidingTreeModel) {
        for (NodeProvider provider : ((ProvidingTreeModel)myTreeModel).getNodeProviders()) {
          if (provider instanceof FileStructureNodeProvider) {
            fileStructureNodeProviders.add((FileStructureNodeProvider)provider);
          }
        }
      }
    }
    int checkBoxCount = fileStructureNodeProviders.size() + fileStructureFilters.size();
    JPanel panel = new JPanel(new BorderLayout());
    panel.setPreferredSize(JBUI.size(540, 500));
    // Checkbox grid: half-count columns when the count divides by 4, otherwise 3 columns.
    JPanel chkPanel = new JPanel(new GridLayout(0, checkBoxCount > 0 && checkBoxCount % 4 == 0 ? checkBoxCount / 2 : 3,
                                                JBUIScale.scale(UIUtil.DEFAULT_HGAP), 0));
    chkPanel.setOpaque(false);
    // Navigate on both Enter and the Edit Source (F4) shortcut.
    Shortcut[] F4 = ActionManager.getInstance().getAction(IdeActions.ACTION_EDIT_SOURCE).getShortcutSet().getShortcuts();
    Shortcut[] ENTER = CustomShortcutSet.fromString("ENTER").getShortcuts();
    CustomShortcutSet shortcutSet = new CustomShortcutSet(ArrayUtil.mergeArrays(F4, ENTER));
    new NavigateSelectedElementAction(panel).registerCustomShortcutSet(shortcutSet, panel);
    DumbAwareAction.create(e -> {
      // Escape first dismisses the speed-search popup, then the structure popup itself.
      if (mySpeedSearch != null && mySpeedSearch.isPopupActive()) {
        mySpeedSearch.hidePopup();
      }
      else {
        myPopup.cancel();
      }
    }).registerCustomShortcutSet(CustomShortcutSet.fromString("ESCAPE"), myTree);
    new ClickListener() {
      @Override
      public boolean onClick(@NotNull MouseEvent e, int clickCount) {
        // Navigate on click only when the click actually lands on a row's text area.
        TreePath path = myTree.getClosestPathForLocation(e.getX(), e.getY());
        Rectangle bounds = path == null ? null : myTree.getPathBounds(path);
        if (bounds == null ||
            bounds.x > e.getX() ||
            bounds.y > e.getY() || bounds.y + bounds.height < e.getY()) return false;
        navigateSelectedElement();
        return true;
      }
    }.installOn(myTree);
    for (FileStructureFilter filter : fileStructureFilters) {
      addCheckbox(chkPanel, filter);
    }
    for (FileStructureNodeProvider provider : fileStructureNodeProviders) {
      addCheckbox(chkPanel, provider);
    }
    JPanel topPanel = new JPanel(new BorderLayout());
    topPanel.add(chkPanel, BorderLayout.WEST);
    topPanel.add(createSettingsButton(), BorderLayout.EAST);
    topPanel.setBackground(JBUI.CurrentTheme.Popup.toolbarPanelColor());
    Dimension prefSize = topPanel.getPreferredSize();
    prefSize.height = JBUI.CurrentTheme.Popup.toolbarHeight();
    topPanel.setPreferredSize(prefSize);
    topPanel.setBorder(JBUI.Borders.emptyLeft(UIUtil.DEFAULT_HGAP));
    panel.add(topPanel, BorderLayout.NORTH);
    JScrollPane scrollPane = ScrollPaneFactory.createScrollPane(myTree);
    scrollPane.setBorder(IdeBorderFactory.createBorder(JBUI.CurrentTheme.Popup.toolbarBorderColor(), SideBorder.TOP | SideBorder.BOTTOM));
    panel.add(scrollPane, BorderLayout.CENTER);
    // Expose selection/navigation data so platform actions work inside the popup.
    DataManager.registerDataProvider(panel, dataId -> {
      if (CommonDataKeys.PROJECT.is(dataId)) {
        return myProject;
      }
      if (PlatformDataKeys.FILE_EDITOR.is(dataId)) {
        return myFileEditor;
      }
      if (OpenFileDescriptor.NAVIGATE_IN_EDITOR.is(dataId)) {
        if (myFileEditor instanceof TextEditor) {
          return ((TextEditor)myFileEditor).getEditor();
        }
      }
      if (CommonDataKeys.PSI_ELEMENT.is(dataId)) {
        return getSelectedElements().filter(PsiElement.class).first();
      }
      if (LangDataKeys.PSI_ELEMENT_ARRAY.is(dataId)) {
        return PsiUtilCore.toPsiElementArray(getSelectedElements().filter(PsiElement.class).toList());
      }
      if (CommonDataKeys.NAVIGATABLE.is(dataId)) {
        return getSelectedElements().filter(Navigatable.class).first();
      }
      if (CommonDataKeys.NAVIGATABLE_ARRAY.is(dataId)) {
        List<Navigatable> result = getSelectedElements().filter(Navigatable.class).toList();
        return result.isEmpty() ? null : result.toArray(Navigatable.EMPTY_NAVIGATABLE_ARRAY);
      }
      if (LangDataKeys.POSITION_ADJUSTER_POPUP.is(dataId)) {
        return myPopup;
      }
      if (PlatformDataKeys.COPY_PROVIDER.is(dataId)) {
        return myCopyPasteDelegator.getCopyProvider();
      }
      if (PlatformDataKeys.TREE_EXPANDER.is(dataId)) {
        return myTreeExpander;
      }
      return null;
    });
    panel.addFocusListener(new FocusAdapter() {
      @Override
      public void focusLost(FocusEvent e) {
        // Losing focus closes the popup (it behaves like a transient menu).
        myPopup.cancel();
      }
    });
    return panel;
  }
@NotNull
private JBIterable<Object> getSelectedElements() {
return JBIterable.of(myTree.getSelectionPaths())
.filterMap(o -> StructureViewComponent.unwrapValue(o.getLastPathComponent()));
}
  /**
   * Builds the gear icon that opens a small settings popup (sorters + "narrow down" toggle).
   * While that popup is open, myCanClose is false so the main popup is not cancelled.
   */
  @NotNull
  private JComponent createSettingsButton() {
    JLabel label = new JLabel(AllIcons.General.GearPlain);
    label.setBorder(JBUI.Borders.empty(0, 4));
    label.setHorizontalAlignment(SwingConstants.RIGHT);
    label.setVerticalAlignment(SwingConstants.CENTER);
    List<AnAction> sorters = createSorters();
    new ClickListener() {
      @Override
      public boolean onClick(@NotNull MouseEvent event, int clickCount) {
        DefaultActionGroup group = new DefaultActionGroup();
        if (!sorters.isEmpty()) {
          group.addAll(sorters);
          group.addSeparator();
        }
        //addGroupers(group);
        //addFilters(group);
        group.add(new ToggleNarrowDownAction());
        DataManager dataManager = DataManager.getInstance();
        ListPopup popup = JBPopupFactory.getInstance().createActionGroupPopup(
          null, group, dataManager.getDataContext(label), JBPopupFactory.ActionSelectionAid.SPEEDSEARCH, false);
        popup.addListener(new JBPopupListener() {
          @Override
          public void onClosed(@NotNull LightweightWindowEvent event) {
            // Re-allow closing the main popup once the settings popup is gone.
            myCanClose = true;
          }
        });
        myCanClose = false;
        popup.showUnderneathOf(label);
        return true;
      }
    }.installOn(label);
    return label;
  }
private List<AnAction> createSorters() {
List<AnAction> actions = new ArrayList<>();
for (Sorter sorter : myTreeModel.getSorters()) {
if (sorter.isVisible()) {
actions.add(new MyTreeActionWrapper(sorter));
}
}
return actions;
}
  /**
   * Among the direct tree children of {@code adjusted}, finds the node whose PSI text range
   * lies closest to {@code element}'s range; used to refine a fallback selection.
   * Stub-based elements without loaded AST are skipped (no reliable text range).
   *
   * @return the closest child tree node, or null when none qualifies
   */
  @Nullable
  private static Object findClosestPsiElement(@NotNull PsiElement element,
                                              @NotNull TreePath adjusted,
                                              @NotNull TreeModel treeModel) {
    TextRange range = element.getTextRange();
    if (range == null) return null;
    Object parent = adjusted.getLastPathComponent();
    int minDistance = 0;
    Object minChild = null;
    for (int i = 0, count = treeModel.getChildCount(parent); i < count; i++) {
      Object child = treeModel.getChild(parent, i);
      Object value = StructureViewComponent.unwrapValue(child);
      // Stub-backed PSI would require AST loading to get a range — skip it.
      if (value instanceof StubBasedPsiElement && ((StubBasedPsiElement)value).getStub() != null) continue;
      TextRange r = value instanceof PsiElement ? ((PsiElement)value).getTextRange() : null;
      if (r == null) continue;
      int distance = TextRangeUtil.getDistance(range, r);
      // First qualifying child initializes the minimum; afterwards strict improvement wins.
      if (minChild == null || distance < minDistance) {
        minDistance = distance;
        minChild = child;
      }
    }
    return minChild;
  }
  /**
   * Sorter action shown in the settings popup: toggling it updates the actions owner,
   * persists the choice and rebuilds the tree.
   */
  private class MyTreeActionWrapper extends TreeActionWrapper {
    private final TreeAction myAction;
    MyTreeActionWrapper(TreeAction action) {
      super(action, myTreeActionsOwner);
      myAction = action;
      myTreeActionsOwner.setActionIncluded(action, getDefaultValue(action));
    }
    @Override
    public void update(@NotNull AnActionEvent e) {
      super.update(e);
      // Icons are dropped — the settings popup shows plain checkable items.
      e.getPresentation().setIcon(null);
    }
    @Override
    public void setSelected(@NotNull AnActionEvent e, boolean state) {
      // Reverted actions invert the meaning of "selected" for the owner.
      boolean actionState = TreeModelWrapper.shouldRevert(myAction) != state;
      myTreeActionsOwner.setActionIncluded(myAction, actionState);
      saveState(myAction, state);
      rebuild(false).onProcessed(ignore -> {
        if (mySpeedSearch.isPopupActive()) {
          mySpeedSearch.refreshSelection();
        }
      });
    }
  }
@Nullable
private AbstractTreeNode getSelectedNode() {
TreePath path = myTree.getSelectionPath();
Object o = StructureViewComponent.unwrapNavigatable(path == null ? null : path.getLastPathComponent());
return o instanceof AbstractTreeNode ? (AbstractTreeNode)o : null;
}
  /**
   * Navigates to the selected node's source (closing the popup on success) inside a command,
   * so the jump is recorded in the IDE navigation history.
   *
   * @return true when navigation happened
   */
  private boolean navigateSelectedElement() {
    AbstractTreeNode selectedNode = getSelectedNode();
    if (ApplicationManager.getApplication().isInternal()) {
      // Internal-mode diagnostics: log which item a speed-search prefix resolved to.
      String enteredPrefix = mySpeedSearch.getEnteredPrefix();
      String itemText = getSpeedSearchText(selectedNode);
      if (StringUtil.isNotEmpty(enteredPrefix) && StringUtil.isNotEmpty(itemText)) {
        LOG.info("Chosen in file structure popup by prefix '" + enteredPrefix + "': '" + itemText + "'");
      }
    }
    Ref<Boolean> succeeded = new Ref<>();
    CommandProcessor commandProcessor = CommandProcessor.getInstance();
    commandProcessor.executeCommand(myProject, () -> {
      if (selectedNode != null) {
        if (selectedNode.canNavigateToSource()) {
          selectedNode.navigate(true);
          myPopup.cancel();
          succeeded.set(true);
        }
        else {
          succeeded.set(false);
        }
      }
      else {
        succeeded.set(false);
      }
      IdeDocumentHistory.getInstance(myProject).includeCurrentCommandAsNavigation();
    }, LangBundle.message("command.name.navigate"), null);
    return succeeded.get();
  }
  /**
   * Adds a checkbox for a filter or node provider to the top panel: restores its persisted
   * state, appends its shortcut to the label, and wires toggling to a tree rebuild.
   * Actions without checkbox text are skipped.
   */
  private void addCheckbox(JPanel panel, TreeAction action) {
    String text = action instanceof FileStructureFilter ? ((FileStructureFilter)action).getCheckBoxText() :
                  action instanceof FileStructureNodeProvider ? ((FileStructureNodeProvider)action).getCheckBoxText() : null;
    if (text == null) return;
    Shortcut[] shortcuts = extractShortcutFor(action);
    JBCheckBox checkBox = new JBCheckBox();
    checkBox.setOpaque(false);
    UIUtil.applyStyle(UIUtil.ComponentStyle.SMALL, checkBox);
    boolean selected = getDefaultValue(action);
    checkBox.setSelected(selected);
    // Reverted filters invert the meaning of "checked" for the actions owner.
    boolean isRevertedStructureFilter = action instanceof FileStructureFilter && ((FileStructureFilter)action).isReverted();
    myTreeActionsOwner.setActionIncluded(action, isRevertedStructureFilter != selected);
    checkBox.addActionListener(actionEvent -> {
      logFileStructureCheckboxClick(action);
      boolean state = checkBox.isSelected();
      // Auto-clicked checkboxes must not overwrite the user's persisted preference.
      if (!myAutoClicked.contains(checkBox)) {
        saveState(action, state);
      }
      myTreeActionsOwner.setActionIncluded(action, isRevertedStructureFilter != state);
      rebuild(false).onProcessed(ignore -> {
        if (mySpeedSearch.isPopupActive()) {
          mySpeedSearch.refreshSelection();
        }
      });
    });
    checkBox.setFocusable(false);
    if (shortcuts.length > 0) {
      text += " (" + KeymapUtil.getShortcutText(shortcuts[0]) + ")";
      DumbAwareAction.create(e -> checkBox.doClick())
        .registerCustomShortcutSet(new CustomShortcutSet(shortcuts), myTree);
    }
    checkBox.setText(text);
    panel.add(checkBox);
    myCheckBoxes.put(action.getClass(), checkBox);
  }
  /** Reports a checkbox toggle to the feature-usage statistics collector. */
  private void logFileStructureCheckboxClick(TreeAction action) {
    Language language = null;
    FileType fileType = myFileEditor.getFile().getFileType();
    if (fileType instanceof LanguageFileType) {
      language = ((LanguageFileType) fileType).getLanguage();
    }
    ActionsEventLogGroup.ACTION_INVOKED.log(
      myProject,
      EventFields.PluginInfoFromInstance.with(action),
      EventFields.ActionPlace.with(ActionPlaces.FILE_STRUCTURE_POPUP),
      EventFields.CurrentFile.with(language),
      ActionsEventLogGroup.ACTION_CLASS.with(action.getClass().getName()),
      ActionsEventLogGroup.ACTION_ID.with(action.getClass().getName())
    );
  }
@NotNull
private Promise<Void> rebuild(boolean refilterOnly) {
Object selection = JBIterable.of(myTree.getSelectionPaths())
.filterMap(o -> StructureViewComponent.unwrapValue(o.getLastPathComponent())).first();
return rebuildAndSelect(refilterOnly, selection).then(o -> null);
}
  /**
   * Rebuilds (or just refilters) the tree on the model's invoker thread, then restores the
   * selection. A full rebuild invalidates the model and recurses once with refilterOnly=true.
   *
   * @param refilterOnly true to only re-apply the current filter
   * @param selection    value to reselect afterwards; may be null
   * @return promise resolved with the selected path, or an error when selection was rejected
   */
  @NotNull
  private Promise<TreePath> rebuildAndSelect(boolean refilterOnly, Object selection) {
    AsyncPromise<TreePath> result = new AsyncPromise<>();
    myStructureTreeModel.getInvoker().invoke(() -> {
      if (refilterOnly) {
        myFilteringStructure.refilter();
        myStructureTreeModel.invalidate().onSuccess(
          res ->
            // With no selection target, just walk the tree once to force it to materialize.
            (selection == null ? myAsyncTreeModel.accept(o -> TreeVisitor.Action.CONTINUE) : select(selection))
              .onError(ignore2 -> result.setError("rejected"))
              .onSuccess(p -> UIUtil.invokeLaterIfNeeded(
                () -> {
                  // Composite models get one extra level of initial expansion.
                  TreeUtil.expand(getTree(), myTreeModel instanceof StructureViewCompositeModel ? 3 : 2);
                  TreeUtil.ensureSelection(myTree);
                  mySpeedSearch.refreshSelection();
                  result.setResult(p);
                })));
      }
      else {
        myTreeStructure.rebuildTree();
        myStructureTreeModel.invalidate().onSuccess(res -> rebuildAndSelect(true, selection).processed(result));
      }
    });
    return result;
  }
static Shortcut @NotNull [] extractShortcutFor(@NotNull TreeAction action) {
if (action instanceof ActionShortcutProvider) {
String actionId = ((ActionShortcutProvider)action).getActionIdForShortcut();
return KeymapUtil.getActiveKeymapShortcuts(actionId).getShortcuts();
}
return action instanceof FileStructureFilter ?
((FileStructureFilter)action).getShortcut() : ((FileStructureNodeProvider)action).getShortcut();
}
  /**
   * Persisted default for a tree action's checkbox; only the alphabetical sorter
   * defaults to enabled when nothing is stored yet.
   */
  private static boolean getDefaultValue(TreeAction action) {
    String propertyName = action instanceof PropertyOwner ? ((PropertyOwner)action).getPropertyName() : action.getName();
    return PropertiesComponent.getInstance().getBoolean(TreeStructureUtil.getPropertyName(propertyName), Sorter.ALPHA_SORTER.equals(action));
  }
  /** Persists a tree action's checkbox state (mirrors {@link #getDefaultValue}). */
  private static void saveState(TreeAction action, boolean state) {
    String propertyName = action instanceof PropertyOwner ? ((PropertyOwner)action).getPropertyName() : action.getName();
    PropertiesComponent.getInstance().setValue(TreeStructureUtil.getPropertyName(propertyName), state, Sorter.ALPHA_SORTER.equals(action));
  }
  /** Sets the popup title; must be called before {@link #show()} to take effect. */
  public void setTitle(@NlsContexts.PopupTitle String title) {
    myTitle = title;
  }
  @NotNull
  public Tree getTree() {
    return myTree;
  }
  @TestOnly
  public TreeSpeedSearch getSpeedSearch() {
    return mySpeedSearch;
  }
  /** Test hook: replaces the speed-search prefix as the filter source in unit-test mode. */
  @TestOnly
  public void setSearchFilterForTests(String filter) {
    myTestSearchFilter = filter;
  }
public void setTreeActionState(Class<? extends TreeAction> action, boolean state) {
JBCheckBox checkBox = myCheckBoxes.get(action);
if (checkBox != null) {
checkBox.setSelected(state);
for (ActionListener listener : checkBox.getActionListeners()) {
listener.actionPerformed(new ActionEvent(this, 1, ""));
}
}
}
  /**
   * Text the speed search matches against for a tree node: the node's string form, plus the
   * location string when the element opts into searching it.
   *
   * @return the searchable text, or null when no presentable text is available
   */
  @Nullable
  public static String getSpeedSearchText(Object object) {
    String text = String.valueOf(object);
    Object value = StructureViewComponent.unwrapWrapper(object);
    // text is null only when object.toString() itself returned null (see note below).
    if (text != null) {
      if (value instanceof PsiTreeElementBase && ((PsiTreeElementBase)value).isSearchInLocationString()) {
        String locationString = ((PsiTreeElementBase)value).getLocationString();
        if (!StringUtil.isEmpty(locationString)) {
          String locationPrefix = null;
          String locationSuffix = null;
          if (value instanceof LocationPresentation) {
            locationPrefix = ((LocationPresentation)value).getLocationPrefix();
            locationSuffix = ((LocationPresentation)value).getLocationSuffix();
          }
          return text +
                 StringUtil.notNullize(locationPrefix, LocationPresentation.DEFAULT_LOCATION_PREFIX) +
                 locationString +
                 StringUtil.notNullize(locationSuffix, LocationPresentation.DEFAULT_LOCATION_SUFFIX);
        }
      }
      return text;
    }
    // NB!: this point is achievable if the following method returns null
    // see com.intellij.ide.util.treeView.NodeDescriptor.toString
    if (value instanceof TreeElement) {
      return ReadAction.compute(() -> ((TreeElement)value).getPresentation().getPresentableText());
    }
    return null;
  }
  // TreeActionsOwner: action state is managed through the checkboxes/settings popup instead,
  // so these interface methods are intentionally no-ops.
  @Override
  public void setActionActive(String name, boolean state) {
  }
  @Override
  public boolean isActionActive(String name) {
    return false;
  }
  /**
   * Filter backing the "narrow down on typing" behavior: shows nodes matching the current
   * speed-search prefix plus all of their ancestors (cached in myVisibleParents per prefix).
   */
  private class FileStructurePopupFilter implements ElementFilter {
    private String myLastFilter;
    // Ancestors of matched nodes for the current filter; kept visible so matches stay reachable.
    private final Set<Object> myVisibleParents = new HashSet<>();
    private final boolean isUnitTest = ApplicationManager.getApplication().isUnitTestMode();
    @Override
    public boolean shouldBeShowing(Object value) {
      if (!isShouldNarrowDown()) return true;
      String filter = getSearchPrefix();
      if (!StringUtil.equals(myLastFilter, filter)) {
        // Prefix changed — the cached ancestor set no longer applies.
        myVisibleParents.clear();
        myLastFilter = filter;
      }
      if (filter != null) {
        if (myVisibleParents.contains(value)) {
          return true;
        }
        String text = getSpeedSearchText(value);
        if (text == null) return false;
        if (matches(filter, text)) {
          // Mark the whole ancestor chain visible so the match has a path to the root.
          Object o = value;
          while (o instanceof FilteringTreeStructure.FilteringNode && (o = ((FilteringTreeStructure.FilteringNode)o).getParent()) != null) {
            myVisibleParents.add(o);
          }
          return true;
        }
        else {
          return false;
        }
      }
      return true;
    }
    private boolean matches(@NotNull String filter, @NotNull String text) {
      // Outside tests, only filter while the speed-search popup is actually active.
      return (isUnitTest || mySpeedSearch.isPopupActive()) &&
             StringUtil.isNotEmpty(filter) &&
             mySpeedSearch.getComparator().matchingFragments(filter, text) != null;
    }
  }
  /**
   * Current filter text: the test override in unit-test mode, otherwise the non-empty
   * speed-search prefix, or null when there is nothing to filter by.
   */
  @Nullable
  private String getSearchPrefix() {
    if (ApplicationManager.getApplication().isUnitTestMode()) return myTestSearchFilter;
    return mySpeedSearch != null && !StringUtil.isEmpty(mySpeedSearch.getEnteredPrefix())
           ? mySpeedSearch.getEnteredPrefix() : null;
  }
  /**
   * Speed search that prefers matches related to the element the popup was opened on:
   * its children first, then siblings, then deeper descendants, before the generic best match.
   */
  private class MyTreeSpeedSearch extends TreeSpeedSearch {
    MyTreeSpeedSearch() {
      super(myTree, path -> getSpeedSearchText(TreeUtil.getLastUserObject(path)), true);
    }
    @Override
    protected Point getComponentLocationOnScreen() {
      return myPopup.getContent().getLocationOnScreen();
    }
    @Override
    protected Rectangle getComponentVisibleRect() {
      return myPopup.getContent().getVisibleRect();
    }
    @Override
    public Object findElement(@NotNull String s) {
      List<SpeedSearchObjectWithWeight> elements = SpeedSearchObjectWithWeight.findElement(s, this);
      SpeedSearchObjectWithWeight best = ContainerUtil.getFirstItem(elements);
      if (best == null) return null;
      if (myInitialElement instanceof PsiElement) {
        PsiElement initial = (PsiElement)myInitialElement;
        // find children of the initial element
        SpeedSearchObjectWithWeight bestForParent = find(initial, elements, FileStructurePopup::isParent);
        if (bestForParent != null) return bestForParent.node;
        // find siblings of the initial element
        PsiElement parent = initial.getParent();
        if (parent != null) {
          SpeedSearchObjectWithWeight bestSibling = find(parent, elements, FileStructurePopup::isParent);
          if (bestSibling != null) return bestSibling.node;
        }
        // find grand children of the initial element
        SpeedSearchObjectWithWeight bestForAncestor = find(initial, elements, FileStructurePopup::isAncestor);
        if (bestForAncestor != null) return bestForAncestor.node;
      }
      return best.node;
    }
  }
  /** First candidate whose tree path satisfies {@code predicate} for {@code element}, or null. */
  @Nullable
  private static SpeedSearchObjectWithWeight find(@NotNull PsiElement element,
                                                  @NotNull List<? extends SpeedSearchObjectWithWeight> objects,
                                                  @NotNull BiPredicate<? super PsiElement, ? super TreePath> predicate) {
    return ContainerUtil.find(objects, object -> predicate.test(element, ObjectUtils.tryCast(object.node, TreePath.class)));
  }
  /** True when {@code path}'s last node unwraps to exactly {@code element}. */
  private static boolean isElement(@NotNull PsiElement element, @Nullable TreePath path) {
    return element.equals(StructureViewComponent.unwrapValue(TreeUtil.getLastUserObject(FilteringTreeStructure.FilteringNode.class, path)));
  }
  /** True when {@code parent} is the element represented by {@code path}'s parent path. */
  private static boolean isParent(@NotNull PsiElement parent, @Nullable TreePath path) {
    return path != null && isElement(parent, path.getParentPath());
  }
private static boolean isAncestor(@NotNull PsiElement ancestor, @Nullable TreePath path) {
while (path != null) {
if (isElement(ancestor, path)) return true;
path = path.getParentPath();
}
return false;
}
  /** Tree component of the popup; hides the root and supplies PSI for hint updates. */
  static class MyTree extends DnDAwareTree implements PlaceProvider {
    MyTree(TreeModel treeModel) {
      super(treeModel);
      setRootVisible(false);
      setShowsRootHandles(true);
      // Let quick-doc/hint popups resolve the PSI element behind a hovered node.
      HintUpdateSupply.installHintUpdateSupply(this, o -> {
        Object value = StructureViewComponent.unwrapValue(o);
        return value instanceof PsiElement ? (PsiElement)value : null;
      });
    }
    @Override
    public String getPlace() {
      return ActionPlaces.STRUCTURE_VIEW_POPUP;
    }
  }
  /** Enter/F4 action: navigates to the selected node and unregisters itself on success. */
  private class NavigateSelectedElementAction extends DumbAwareAction {
    private final JPanel myPanel;
    private NavigateSelectedElementAction(JPanel panel) {
      myPanel = panel;
    }
    @Override
    public void actionPerformed(@NotNull AnActionEvent e) {
      boolean succeeded = navigateSelectedElement();
      if (succeeded) {
        // Navigation closes the popup; the shortcut is no longer needed on the panel.
        unregisterCustomShortcutSet(myPanel);
      }
    }
  }
  /** Settings-popup toggle for the persisted "narrow down on typing" option. */
  private class ToggleNarrowDownAction extends ToggleAction {
    private ToggleNarrowDownAction() {
      super(IdeBundle.message("checkbox.narrow.down.on.typing"));
    }
    @Override
    public boolean isSelected(@NotNull AnActionEvent e) {
      return isShouldNarrowDown();
    }
    @Override
    public void setSelected(@NotNull AnActionEvent e, boolean state) {
      PropertiesComponent.getInstance().setValue(NARROW_DOWN_PROPERTY_KEY, Boolean.toString(state));
      // Re-apply the filter immediately if a search prefix is active.
      if (mySpeedSearch.isPopupActive() && !StringUtil.isEmpty(mySpeedSearch.getEnteredPrefix())) {
        rebuild(true);
      }
    }
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.reducers.ReducerStreams;
import org.elasticsearch.search.aggregations.support.AggregationPath;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
* An internal implementation of {@link Aggregation}. Serves as a base class for all aggregation implementations.
*/
/**
 * An internal implementation of {@link Aggregation}. Serves as a base class for all aggregation implementations.
 *
 * <p>Implementations must provide the type/stream identity ({@link #type()}), the reduce logic
 * ({@link #doReduce}), property resolution ({@link #getProperty(List)}), the XContent body
 * ({@link #doXContentBody}) and the wire format ({@link #doWriteTo}/{@link #doReadFrom}).
 * Name, metadata and reducers are (de)serialized here, in that order, before delegating.
 */
public abstract class InternalAggregation implements Aggregation, ToXContent, Streamable {
    /**
     * The aggregation type that holds all the string types that are associated with an aggregation:
     * <ul>
     *     <li>name - used as the parser type</li>
     *     <li>stream - used as the stream type</li>
     * </ul>
     */
    public static class Type {
        // Parser-facing name; also used as the stream id when none is given explicitly.
        private String name;
        // Wire identifier used to look up the registered aggregation stream.
        private BytesReference stream;
        /** Creates a type whose stream id equals its name. */
        public Type(String name) {
            this(name, new BytesArray(name));
        }
        /** Creates a type with an explicit string stream id. */
        public Type(String name, String stream) {
            this(name, new BytesArray(stream));
        }
        public Type(String name, BytesReference stream) {
            this.name = name;
            this.stream = stream;
        }
        /**
         * @return The name of the type (mainly used for registering the parser for the aggregator (see {@link org.elasticsearch.search.aggregations.Aggregator.Parser#type()}).
         */
        public String name() {
            return name;
        }
        /**
         * @return The name of the stream type (used for registering the aggregation stream
         * (see {@link AggregationStreams#registerStream(AggregationStreams.Stream, org.elasticsearch.common.bytes.BytesReference...)}).
         */
        public BytesReference stream() {
            return stream;
        }
        @Override
        public String toString() {
            return name;
        }
    }
    /** Carries the services available to {@link #reduce(List, ReduceContext)} during the reduce phase. */
    public static class ReduceContext {
        private final BigArrays bigArrays;
        private ScriptService scriptService;
        public ReduceContext(BigArrays bigArrays, ScriptService scriptService) {
            this.bigArrays = bigArrays;
            this.scriptService = scriptService;
        }
        public BigArrays bigArrays() {
            return bigArrays;
        }
        public ScriptService scriptService() {
            return scriptService;
        }
    }
    // The aggregation name; written first on the wire and used as the XContent object key.
    protected String name;
    // Optional user metadata; may be null, in which case no "meta" field is emitted.
    protected Map<String, Object> metaData;
    // Reducers applied (in order) on top of doReduce's result.
    private List<Reducer> reducers;
    /** Constructs an un initialized addAggregation (used for serialization) **/
    protected InternalAggregation() {}
    /**
     * Constructs an get with a given name.
     *
     * @param name The name of the get.
     */
    protected InternalAggregation(String name, List<Reducer> reducers, Map<String, Object> metaData) {
        this.name = name;
        this.reducers = reducers;
        this.metaData = metaData;
    }
    @Override
    public String getName() {
        return name;
    }
    /**
     * @return The {@link Type} of this aggregation
     */
    public abstract Type type();
    /**
     * Reduces the given addAggregation to a single one and returns it. In <b>most</b> cases, the assumption will be the all given
     * addAggregation are of the same type (the same type as this aggregation). For best efficiency, when implementing,
     * try reusing an existing get instance (typically the first in the given list) to save on redundant object
     * construction.
     */
    public final InternalAggregation reduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
        // First reduce the shard-level aggregations, then chain each reducer over the result.
        InternalAggregation aggResult = doReduce(aggregations, reduceContext);
        for (Reducer reducer : reducers) {
            aggResult = reducer.reduce(aggResult, reduceContext);
        }
        return aggResult;
    }
    /** Type-specific reduce; implementations merge the given aggregations into one. */
    public abstract InternalAggregation doReduce(List<InternalAggregation> aggregations, ReduceContext reduceContext);
    @Override
    public Object getProperty(String path) {
        AggregationPath aggPath = AggregationPath.parse(path);
        return getProperty(aggPath.getPathElementsAsStringList());
    }
    public abstract Object getProperty(List<String> path);
    /**
     * Read a size under the assumption that a value of 0 means unlimited.
     */
    protected static int readSize(StreamInput in) throws IOException {
        final int size = in.readVInt();
        return size == 0 ? Integer.MAX_VALUE : size;
    }
    /**
     * Write a size under the assumption that a value of 0 means unlimited.
     */
    protected static void writeSize(int size, StreamOutput out) throws IOException {
        if (size == Integer.MAX_VALUE) {
            size = 0;
        }
        out.writeVInt(size);
    }
    @Override
    public Map<String, Object> getMetaData() {
        return metaData;
    }
    public List<Reducer> reducers() {
        return reducers;
    }
    @Override
    public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(name);
        if (this.metaData != null) {
            builder.field(CommonFields.META);
            builder.map(this.metaData);
        }
        doXContentBody(builder, params);
        builder.endObject();
        return builder;
    }
    /** Emits the type-specific fields inside the object opened by {@link #toXContent}. */
    public abstract XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException;
    @Override
    public final void writeTo(StreamOutput out) throws IOException {
        // Wire order: name, metadata, reducer count, (reducer stream id + reducer)*, subclass body.
        // Must mirror readFrom exactly.
        out.writeString(name);
        out.writeGenericValue(metaData);
        out.writeVInt(reducers.size());
        for (Reducer reducer : reducers) {
            out.writeBytesReference(reducer.type().stream());
            reducer.writeTo(out);
        }
        doWriteTo(out);
    }
    protected abstract void doWriteTo(StreamOutput out) throws IOException;
    @Override
    public final void readFrom(StreamInput in) throws IOException {
        // Must mirror writeTo exactly; see the wire-order comment there.
        name = in.readString();
        metaData = in.readMap();
        int size = in.readVInt();
        if (size == 0) {
            reducers = ImmutableList.of();
        } else {
            reducers = Lists.newArrayListWithCapacity(size);
            for (int i = 0; i < size; i++) {
                // Each reducer is prefixed with its stream id so the right factory can be located.
                BytesReference type = in.readBytesReference();
                Reducer reducer = ReducerStreams.stream(type).readResult(in);
                reducers.add(reducer);
            }
        }
        doReadFrom(in);
    }
    protected abstract void doReadFrom(StreamInput in) throws IOException;
    /**
     * Common xcontent fields that are shared among addAggregation
     */
    public static final class CommonFields {
        public static final XContentBuilderString META = new XContentBuilderString("meta");
        public static final XContentBuilderString BUCKETS = new XContentBuilderString("buckets");
        public static final XContentBuilderString VALUE = new XContentBuilderString("value");
        public static final XContentBuilderString VALUES = new XContentBuilderString("values");
        public static final XContentBuilderString VALUE_AS_STRING = new XContentBuilderString("value_as_string");
        public static final XContentBuilderString DOC_COUNT = new XContentBuilderString("doc_count");
        public static final XContentBuilderString KEY = new XContentBuilderString("key");
        public static final XContentBuilderString KEY_AS_STRING = new XContentBuilderString("key_as_string");
        public static final XContentBuilderString FROM = new XContentBuilderString("from");
        public static final XContentBuilderString FROM_AS_STRING = new XContentBuilderString("from_as_string");
        public static final XContentBuilderString TO = new XContentBuilderString("to");
        public static final XContentBuilderString TO_AS_STRING = new XContentBuilderString("to_as_string");
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.blob.cloud.s3;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Queue;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.commons.io.IOUtils;
import org.apache.jackrabbit.core.data.DataIdentifier;
import org.apache.jackrabbit.core.data.DataRecord;
import org.apache.jackrabbit.core.data.DataStoreException;
import org.apache.jackrabbit.core.data.util.NamedThreadFactory;
import org.apache.jackrabbit.oak.commons.PropertiesUtil;
import org.apache.jackrabbit.oak.plugins.blob.datastore.directaccess.DataRecordDownloadOptions;
import org.apache.jackrabbit.oak.plugins.blob.datastore.directaccess.DataRecordUpload;
import org.apache.jackrabbit.oak.plugins.blob.datastore.directaccess.DataRecordUploadException;
import org.apache.jackrabbit.oak.plugins.blob.datastore.directaccess.DataRecordUploadToken;
import org.apache.jackrabbit.oak.spi.blob.AbstractDataRecord;
import org.apache.jackrabbit.oak.spi.blob.AbstractSharedBackend;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.HttpMethod;
import com.amazonaws.SdkClientException;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.S3ClientOptions;
import com.amazonaws.services.s3.model.BucketAccelerateConfiguration;
import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest;
import com.amazonaws.services.s3.model.CopyObjectRequest;
import com.amazonaws.services.s3.model.CreateBucketRequest;
import com.amazonaws.services.s3.model.DeleteObjectsRequest;
import com.amazonaws.services.s3.model.DeleteObjectsResult;
import com.amazonaws.services.s3.model.GeneratePresignedUrlRequest;
import com.amazonaws.services.s3.model.GetBucketAccelerateConfigurationRequest;
import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest;
import com.amazonaws.services.s3.model.InitiateMultipartUploadResult;
import com.amazonaws.services.s3.model.ListObjectsRequest;
import com.amazonaws.services.s3.model.ListPartsRequest;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PartETag;
import com.amazonaws.services.s3.model.PartListing;
import com.amazonaws.services.s3.model.PartSummary;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.Region;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import com.amazonaws.services.s3.transfer.Copy;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.Upload;
import com.amazonaws.util.StringUtils;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.base.Strings;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.AbstractIterator;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Iterables.filter;
import static java.lang.Thread.currentThread;
/**
* A data store backend that stores data on Amazon S3.
*/
public class S3Backend extends AbstractSharedBackend {
/**
* Logger instance.
*/
private static final Logger LOG = LoggerFactory.getLogger(S3Backend.class);
private static final Logger LOG_STREAMS_DOWNLOAD = LoggerFactory.getLogger("oak.datastore.download.streams");
private static final Logger LOG_STREAMS_UPLOAD = LoggerFactory.getLogger("oak.datastore.upload.streams");
private static final String KEY_PREFIX = "dataStore_";
private static final String META_KEY_PREFIX = "META/";
private static final String REF_KEY = "reference.key";
private static final int MAX_UNIQUE_RECORD_TRIES = 10;
static final String PART_NUMBER = "partNumber";
static final String UPLOAD_ID = "uploadId";
private static final int ONE_MB = 1024*1024;
static final long MIN_MULTIPART_UPLOAD_PART_SIZE = 1024 * 1024 * 10; // 10MB
static final long MAX_MULTIPART_UPLOAD_PART_SIZE = 1024 * 1024 * 256; // 256MB
static final long MAX_SINGLE_PUT_UPLOAD_SIZE = 1024L * 1024L * 1024L * 5L; // 5GB, AWS limitation
static final long MAX_BINARY_UPLOAD_SIZE = 1024L * 1024L * 1024L * 1024L * 5L; // 5TB, AWS limitation
private static final int MAX_ALLOWABLE_UPLOAD_URIS = 10000; // AWS limitation
private AmazonS3Client s3service;
// needed only in case of transfer acceleration is enabled for presigned URIs
private AmazonS3Client s3PresignService;
private String bucket;
private byte[] secret;
private TransferManager tmx;
private Properties properties;
private Date startTime;
private S3RequestDecorator s3ReqDecorator;
private Cache<DataIdentifier, URI> httpDownloadURICache;
// 0 = off by default
private int httpUploadURIExpirySeconds = 0;
private int httpDownloadURIExpirySeconds = 0;
private boolean presignedDownloadURIVerifyExists = true;
/**
 * Initializes the backend: opens the S3 client from {@code properties}, resolves
 * the bucket name and region, creates the bucket if needed, sets up the transfer
 * manager thread pool, optionally renames legacy keys, and configures the
 * presigned upload/download URI settings.
 *
 * @throws DataStoreException if any step fails; the message includes the
 *         configuration with access/secret keys filtered out
 */
public void init() throws DataStoreException {
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        startTime = new Date();
        // Swap the context class loader so the AWS SDK resolves its resources
        // from this bundle's loader (relevant in OSGi deployments); restored in finally.
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        LOG.debug("init");
        s3ReqDecorator = new S3RequestDecorator(properties);
        s3service = Utils.openService(properties);
        // By default presigned URIs use the same client; replaced only when
        // transfer acceleration is enabled below.
        s3PresignService = s3service;
        if (bucket == null || "".equals(bucket.trim())) {
            bucket = properties.getProperty(S3Constants.S3_BUCKET);
            // Alternately check if the 'container' property is set
            if (Strings.isNullOrEmpty(bucket)) {
                bucket = properties.getProperty(S3Constants.S3_CONTAINER);
            }
        }
        String region = properties.getProperty(S3Constants.S3_REGION);
        if (StringUtils.isNullOrEmpty(region)) {
            // No region configured: fall back to the region of the EC2 instance we run on.
            com.amazonaws.regions.Region ec2Region = Regions.getCurrentRegion();
            if (ec2Region != null) {
                region = ec2Region.getName();
            } else {
                throw new AmazonClientException(
                        "parameter ["
                                + S3Constants.S3_REGION
                                + "] not configured and cannot be derived from environment");
            }
        } else if (Utils.DEFAULT_AWS_BUCKET_REGION.equals(region)) {
            region = Region.US_Standard.toString();
        }
        createBucketIfNeeded(region);
        // Transfer manager thread pool; default 10, overridable via S3_WRITE_THREADS.
        int writeThreads = 10;
        String writeThreadsStr = properties.getProperty(S3Constants.S3_WRITE_THREADS);
        if (writeThreadsStr != null) {
            writeThreads = Integer.parseInt(writeThreadsStr);
        }
        LOG.info("Using thread pool of [{}] threads in S3 transfer manager.", writeThreads);
        tmx = new TransferManager(s3service, Executors.newFixedThreadPool(writeThreads,
                new NamedThreadFactory("s3-transfer-manager-worker")));
        String renameKeyProp = properties.getProperty(S3Constants.S3_RENAME_KEYS);
        boolean renameKeyBool = (renameKeyProp == null || "".equals(renameKeyProp))
                ? false
                : Boolean.parseBoolean(renameKeyProp);
        LOG.info("Rename keys [{}]", renameKeyBool);
        if (renameKeyBool) {
            renameKeys();
        }
        // settings around pre-signing
        String putExpiry = properties.getProperty(S3Constants.PRESIGNED_HTTP_UPLOAD_URI_EXPIRY_SECONDS);
        if (putExpiry != null) {
            setHttpUploadURIExpirySeconds(Integer.parseInt(putExpiry));
        }
        String getExpiry = properties.getProperty(S3Constants.PRESIGNED_HTTP_DOWNLOAD_URI_EXPIRY_SECONDS);
        if (getExpiry != null) {
            final int getExpirySeconds = Integer.parseInt(getExpiry);
            setHttpDownloadURIExpirySeconds(getExpirySeconds);
            int cacheMaxSize = 0; // off by default
            String cacheMaxSizeStr = properties.getProperty(S3Constants.PRESIGNED_HTTP_DOWNLOAD_URI_CACHE_MAX_SIZE);
            if (cacheMaxSizeStr != null) {
                cacheMaxSize = Integer.parseInt(cacheMaxSizeStr);
            }
            setHttpDownloadURICacheSize(cacheMaxSize);
        }
        String enablePresignedAccelerationStr = properties.getProperty(S3Constants.PRESIGNED_URI_ENABLE_ACCELERATION);
        setBinaryTransferAccelerationEnabled(enablePresignedAccelerationStr != null && "true".equals(enablePresignedAccelerationStr));
        presignedDownloadURIVerifyExists =
                PropertiesUtil.toBoolean(properties.get(S3Constants.PRESIGNED_HTTP_DOWNLOAD_URI_VERIFY_EXISTS), true);
        LOG.debug("S3 Backend initialized in [{}] ms",
                +(System.currentTimeMillis() - startTime.getTime()));
    } catch (Exception e) {
        LOG.error("Error ", e);
        // Never leak credentials into the exception message.
        Map<String, Object> filteredMap = Maps.newHashMap();
        if (properties != null) {
            filteredMap = Maps.filterKeys(Utils.asMap(properties), new Predicate<String>() {
                @Override public boolean apply(String input) {
                    return !input.equals(S3Constants.ACCESS_KEY) &&
                            !input.equals(S3Constants.SECRET_KEY);
                }
            });
        }
        throw new DataStoreException("Could not initialize S3 from " + filteredMap, e);
    } finally {
        if (contextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }
}
/**
 * Creates the configured S3 bucket in the given region if it does not exist yet
 * and waits until it becomes visible. Failures are logged but not propagated;
 * later operations will surface errors if the bucket is truly unusable.
 *
 * @param region the AWS region used when a missing bucket must be created
 */
private void createBucketIfNeeded(final String region) {
    try {
        if (!s3service.doesBucketExist(bucket)) {
            String bucketRegion = region;
            if (Utils.US_EAST_1_AWS_BUCKET_REGION.equals(region)) {
                // The SDK has changed such that if the region is us-east-1
                // the region value should not be provided in the
                // request to create the bucket.
                // See https://stackoverflow.com/questions/51912072/invalidlocationconstraint-error-while-creating-s3-bucket-when-the-used-command-i
                bucketRegion = null;
            }
            CreateBucketRequest req = new CreateBucketRequest(bucket, bucketRegion);
            s3service.createBucket(req);
            // FIX: the original condition was inverted — it logged the
            // "was not automatically created" error exactly when the wait
            // succeeded, and reported success when it failed. The error path
            // must run when the bucket did NOT become visible.
            // (Assumes Utils.waitForBucket returns true once the bucket is
            // visible, consistent with the two log messages — confirm.)
            if (!Utils.waitForBucket(s3service, bucket)) {
                LOG.error("Bucket [{}] does not exist in [{}] and was not automatically created",
                        bucket, region);
                return;
            }
            LOG.info("Created bucket [{}] in [{}] ", bucket, region);
        } else {
            LOG.info("Using bucket [{}] in [{}] ", bucket, region);
        }
    }
    catch (SdkClientException awsException) {
        LOG.error("Attempt to create S3 bucket [{}] in [{}] failed",
                bucket, region, awsException);
    }
}
/**
 * Enables or disables S3 Transfer Acceleration for presigned URIs.
 * When enabling, verifies acceleration is active on the bucket first; if it is
 * not, a warning is logged and normal (non-accelerated) URIs are generated.
 *
 * @param enabled whether presigned URIs should use transfer acceleration
 */
void setBinaryTransferAccelerationEnabled(boolean enabled) {
    if (enabled) {
        // verify acceleration is enabled on the bucket
        BucketAccelerateConfiguration accelerateConfig = s3service.getBucketAccelerateConfiguration(new GetBucketAccelerateConfigurationRequest(bucket));
        if (accelerateConfig.isAccelerateEnabled()) {
            // If transfer acceleration is enabled for presigned URIs, we need a separate AmazonS3Client
            // instance with the acceleration mode enabled, because we don't want the requests from the
            // data store itself to S3 to use acceleration
            s3PresignService = Utils.openService(properties);
            s3PresignService.setS3ClientOptions(S3ClientOptions.builder().setAccelerateModeEnabled(true).build());
            LOG.info("S3 Transfer Acceleration enabled for presigned URIs.");
        } else {
            // FIX: the message has a single {} placeholder but was passed two
            // arguments; the stray S3Constants.PRESIGNED_URI_ENABLE_ACCELERATION
            // argument was silently ignored by SLF4J and has been removed.
            LOG.warn("S3 Transfer Acceleration is not enabled on the bucket {}. Will create normal, non-accelerated presigned URIs.",
                    bucket);
        }
    } else {
        s3PresignService = s3service;
    }
}
/**
 * It uploads file to Amazon S3. If file size is greater than 5MB, this
 * method uses parallel concurrent connections to upload.
 *
 * <p>If an object with the same key already exists and has the same length,
 * the record is "touched" via a same-key copy to refresh its last-modified
 * time; a differing length is reported as a collision. Otherwise the file is
 * uploaded through the transfer manager.
 *
 * @param identifier identifier of the record to write
 * @param file the local file holding the record content
 * @throws DataStoreException on length collision, or when the copy/upload fails
 */
@Override
public void write(DataIdentifier identifier, File file)
    throws DataStoreException {
    String key = getKeyName(identifier);
    ObjectMetadata objectMetaData = null;
    long start = System.currentTimeMillis();
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(
            getClass().getClassLoader());
        // check if the same record already exists
        try {
            objectMetaData = s3service.getObjectMetadata(bucket, key);
        } catch (AmazonServiceException ase) {
            // 404/403 simply mean "not there (or not visible)" -> proceed to upload.
            if (!(ase.getStatusCode() == 404 || ase.getStatusCode() == 403)) {
                throw ase;
            }
        }
        if (objectMetaData != null) {
            long l = objectMetaData.getContentLength();
            if (l != file.length()) {
                throw new DataStoreException("Collision: " + key
                    + " new length: " + file.length() + " old length: " + l);
            }
            LOG.debug("[{}]'s exists, lastmodified = [{}]", key,
                objectMetaData.getLastModified().getTime());
            // Same-source/same-destination copy: refreshes last-modified
            // without re-transferring the content.
            CopyObjectRequest copReq = new CopyObjectRequest(bucket, key,
                bucket, key);
            copReq.setNewObjectMetadata(objectMetaData);
            Copy copy = tmx.copy(s3ReqDecorator.decorate(copReq));
            try {
                copy.waitForCopyResult();
                LOG.debug("lastModified of [{}] updated successfully.", identifier);
            }catch (Exception e2) {
                // NOTE(review): this is the copy path, yet the message says
                // "Could not upload" — likely a copy/paste of the branch below;
                // confirm before changing the message callers may match on.
                // Also: InterruptedException is caught here without restoring
                // the thread's interrupt status.
                throw new DataStoreException("Could not upload " + key, e2);
            }
        }
        if (objectMetaData == null) {
            try {
                // start multipart parallel upload using amazon sdk
                Upload up = tmx.upload(s3ReqDecorator.decorate(new PutObjectRequest(
                    bucket, key, file)));
                if (LOG_STREAMS_UPLOAD.isDebugEnabled()) {
                    // Log message, with exception so we can get a trace to see where the call came from
                    LOG_STREAMS_UPLOAD.debug("Binary uploaded to S3 - identifier={}", key, new Exception());
                }
                // wait for upload to finish
                up.waitForUploadResult();
                LOG.debug("synchronous upload to identifier [{}] completed.", identifier);
            } catch (Exception e2 ) {
                throw new DataStoreException("Could not upload " + key, e2);
            }
        }
    } finally {
        if (contextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }
    LOG.debug("write of [{}], length=[{}], in [{}]ms",
        identifier, file.length(), (System.currentTimeMillis() - start));
}
/**
 * Check if record identified by identifier exists in Amazon S3.
 *
 * <p>A 404 (not found) or 403 (forbidden, e.g. key-scoped permissions) from a
 * metadata HEAD request is treated as "does not exist"; any other service
 * error is wrapped in a {@link DataStoreException}.
 *
 * @param identifier identifier of the record to probe
 * @return {@code true} if the object exists and is visible
 * @throws DataStoreException on unexpected S3 errors
 */
@Override
public boolean exists(DataIdentifier identifier) throws DataStoreException {
    long start = System.currentTimeMillis();
    String key = getKeyName(identifier);
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        ObjectMetadata objectMetaData = s3service.getObjectMetadata(bucket,
                key);
        if (objectMetaData != null) {
            LOG.trace("exists [{}]: [true] took [{}] ms.",
                    identifier, (System.currentTimeMillis() - start) );
            return true;
        }
        return false;
    } catch (AmazonServiceException e) {
        if (e.getStatusCode() == 404 || e.getStatusCode() == 403) {
            LOG.debug("exists [{}]: [false] took [{}] ms.",
                    identifier, (System.currentTimeMillis() - start) );
            return false;
        }
        // FIX: corrected "occured" -> "occurred" in the error message.
        throw new DataStoreException(
                "Error occurred to getObjectMetadata for key [" + identifier.toString() + "]", e);
    } finally {
        if (contextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }
}
/**
 * Opens a stream to the content of the record identified by {@code identifier}.
 * The caller is responsible for closing the returned stream.
 *
 * @param identifier the record to read
 * @return a stream over the S3 object's content
 * @throws DataStoreException if the object cannot be fetched
 */
@Override
public InputStream read(DataIdentifier identifier)
    throws DataStoreException {
    final long started = System.currentTimeMillis();
    final String objectKey = getKeyName(identifier);
    ClassLoader previousLoader = Thread.currentThread().getContextClassLoader();
    try {
        // Run the SDK call under this bundle's class loader; restored below.
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        S3Object s3Object = s3service.getObject(bucket, objectKey);
        InputStream content = s3Object.getObjectContent();
        LOG.debug("[{}] read took [{}]ms", identifier, (System.currentTimeMillis() - started));
        if (LOG_STREAMS_DOWNLOAD.isDebugEnabled()) {
            // Log message, with exception so we can get a trace to see where the call came from
            LOG_STREAMS_DOWNLOAD.debug("Binary downloaded from S3 - identifier={}", objectKey, new Exception());
        }
        return content;
    } catch (AmazonServiceException e) {
        throw new DataStoreException("Object not found: " + objectKey, e);
    } finally {
        if (previousLoader != null) {
            Thread.currentThread().setContextClassLoader(previousLoader);
        }
    }
}
/**
 * Returns a lazy iterator over the identifiers of all data records in the
 * bucket (metadata records excluded by the key naming scheme).
 *
 * @throws DataStoreException if listing fails
 */
@Override
public Iterator<DataIdentifier> getAllIdentifiers()
    throws DataStoreException {
    return new RecordsIterator<DataIdentifier>(
        new Function<S3ObjectSummary, DataIdentifier>() {
            @Override
            public DataIdentifier apply(S3ObjectSummary summary) {
                String identifierName = getIdentifierName(summary.getKey());
                return new DataIdentifier(identifierName);
            }
        });
}
/**
 * Deletes the record identified by {@code identifier} from the bucket.
 *
 * @param identifier the record to delete
 * @throws DataStoreException if the delete request fails
 */
@Override
public void deleteRecord(DataIdentifier identifier)
    throws DataStoreException {
    final long started = System.currentTimeMillis();
    final String objectKey = getKeyName(identifier);
    ClassLoader previousLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        s3service.deleteObject(bucket, objectKey);
        LOG.debug("Identifier [{}] deleted. It took [{}]ms.",
                identifier, (System.currentTimeMillis() - started));
    } catch (AmazonServiceException e) {
        throw new DataStoreException(
                "Could not delete dataIdentifier " + identifier, e);
    } finally {
        if (previousLoader != null) {
            Thread.currentThread().setContextClassLoader(previousLoader);
        }
    }
}
/**
 * Shuts this backend down: aborts multipart uploads started since {@code init},
 * then stops the transfer manager and releases the S3 client.
 */
@Override
public void close() {
    // Abort any multipart uploads this backend started; skip when the
    // bucket no longer exists.
    if (s3service.doesBucketExist(bucket)) {
        tmx.abortMultipartUploads(bucket, startTime);
    }
    tmx.shutdownNow();
    s3service.shutdown();
    LOG.info("S3Backend closed.");
}
/**
 * @return the name of the S3 bucket this backend operates on
 */
public String getBucket() {
    return bucket;
}
/**
 * Sets the S3 bucket name. When set before {@code init}, this value takes
 * precedence over the bucket/container configured in the properties.
 *
 * @param bucket the bucket name to use
 */
public void setBucket(String bucket) {
    this.bucket = bucket;
}
/**
 * Properties used to configure the backend. If provided explicitly
 * before init is invoked then these take precedence
 *
 * @param properties to configure S3Backend
 */
public void setProperties(Properties properties) {
    this.properties = properties;
}
/**
 * Stores a metadata record under the META/ prefix, reading the content from
 * the given stream.
 *
 * @param input the record content; must not be null
 * @param name the record name; must not be empty
 * @throws DataStoreException if the upload is interrupted or fails
 */
@Override
public void addMetadataRecord(final InputStream input, final String name) throws DataStoreException {
    checkArgument(input != null, "input should not be null");
    checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        Upload upload = tmx.upload(s3ReqDecorator
                .decorate(new PutObjectRequest(bucket, addMetaKeyPrefix(name), input, new ObjectMetadata())));
        upload.waitForUploadResult();
    } catch (InterruptedException e) {
        // FIX: restore the interrupt status so callers up the stack can still
        // observe the interruption after we translate the exception.
        Thread.currentThread().interrupt();
        LOG.error("Error in uploading", e);
        throw new DataStoreException("Error in uploading", e);
    } finally {
        if (contextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }
}
/**
 * Stores a metadata record under the META/ prefix, reading the content from
 * the given file.
 *
 * @param input the file holding the record content; must not be null
 * @param name the record name; must not be empty
 * @throws DataStoreException if the upload is interrupted or fails
 */
@Override
public void addMetadataRecord(File input, String name) throws DataStoreException {
    checkArgument(input != null, "input should not be null");
    checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        Upload upload = tmx.upload(s3ReqDecorator
                .decorate(new PutObjectRequest(bucket, addMetaKeyPrefix(name), input)));
        upload.waitForUploadResult();
    } catch (InterruptedException e) {
        // FIX: restore the interrupt status before translating the exception.
        Thread.currentThread().interrupt();
        // FIX: the original passed new Object[] {input, e} against a single {}
        // placeholder; pass the file as the placeholder argument and the
        // exception as the trailing throwable so the stack trace is logged.
        LOG.error("Exception in uploading metadata file {}", input, e);
        throw new DataStoreException("Error in uploading metadata file", e);
    } finally {
        if (contextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }
}
/**
 * Looks up a metadata record by name.
 *
 * <p>Best-effort: any failure (including "not found") is logged and
 * {@code null} is returned rather than propagated — callers must handle a
 * null result.
 *
 * @param name the record name; must not be empty
 * @return the record, or {@code null} if it does not exist or lookup failed
 */
@Override
public DataRecord getMetadataRecord(String name) {
    checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(
            getClass().getClassLoader());
        ObjectMetadata meta = s3service.getObjectMetadata(bucket, addMetaKeyPrefix(name));
        return new S3DataRecord(this, s3service, bucket, new DataIdentifier(name),
            meta.getLastModified().getTime(), meta.getContentLength(), true);
    } catch(Exception e) {
        // Broad catch is deliberate: a missing record surfaces as an
        // AmazonServiceException and is mapped to a null return.
        LOG.error("Error getting metadata record for {}", name, e);
    }
    finally {
        if (contextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }
    return null;
}
/**
 * Returns all metadata records whose names start with the given prefix.
 *
 * @param prefix the name prefix to match; must not be null (empty matches all)
 * @return the matching records (possibly empty)
 */
@Override
public List<DataRecord> getAllMetadataRecords(String prefix) {
    checkArgument(null != prefix, "prefix should not be null");
    List<DataRecord> metadataList = new ArrayList<DataRecord>();
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(
                getClass().getClassLoader());
        ListObjectsRequest listObjectsRequest =
                new ListObjectsRequest().withBucketName(bucket).withPrefix(addMetaKeyPrefix(prefix));
        ObjectListing listing = s3service.listObjects(listObjectsRequest);
        // FIX: S3 listings are paginated (max 1000 keys per page); the
        // original only consumed the first page and silently dropped the rest.
        while (true) {
            for (final S3ObjectSummary s3ObjSumm : listing.getObjectSummaries()) {
                metadataList.add(new S3DataRecord(this, s3service, bucket,
                        new DataIdentifier(stripMetaKeyPrefix(s3ObjSumm.getKey())),
                        s3ObjSumm.getLastModified().getTime(), s3ObjSumm.getSize(), true));
            }
            if (!listing.isTruncated()) {
                break;
            }
            listing = s3service.listNextBatchOfObjects(listing);
        }
    } finally {
        if (contextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }
    return metadataList;
}
/**
 * Deletes the metadata record stored under {@code name}.
 *
 * @param name non-empty metadata record name
 * @return always {@code true}; a failing delete surfaces as an AWS runtime exception
 */
@Override
public boolean deleteMetadataRecord(String name) {
    checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
    ClassLoader previousLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        s3service.deleteObject(bucket, addMetaKeyPrefix(name));
        return true;
    } finally {
        if (previousLoader != null) {
            Thread.currentThread().setContextClassLoader(previousLoader);
        }
    }
}
/**
 * Deletes all metadata records whose names start with the given prefix.
 *
 * @param prefix the name prefix to match; must not be null (empty matches all)
 */
@Override
public void deleteAllMetadataRecords(String prefix) {
    checkArgument(null != prefix, "prefix should not be empty");
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(
                getClass().getClassLoader());
        ListObjectsRequest listObjectsRequest =
                new ListObjectsRequest().withBucketName(bucket).withPrefix(addMetaKeyPrefix(prefix));
        ObjectListing metaList = s3service.listObjects(listObjectsRequest);
        // FIX: S3 listings are paginated (max 1000 keys per page); the
        // original only deleted the first page. Deleting page-by-page also
        // keeps each DeleteObjectsRequest within the 1000-key API limit.
        while (true) {
            List<DeleteObjectsRequest.KeyVersion> deleteList = new ArrayList<DeleteObjectsRequest.KeyVersion>();
            for (S3ObjectSummary s3ObjSumm : metaList.getObjectSummaries()) {
                deleteList.add(new DeleteObjectsRequest.KeyVersion(s3ObjSumm.getKey()));
            }
            if (deleteList.size() > 0) {
                DeleteObjectsRequest delObjsReq = new DeleteObjectsRequest(bucket);
                delObjsReq.setKeys(deleteList);
                s3service.deleteObjects(delObjsReq);
            }
            if (!metaList.isTruncated()) {
                break;
            }
            metaList = s3service.listNextBatchOfObjects(metaList);
        }
    } finally {
        if (contextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }
}
/**
 * Returns a lazy iterator over all data records in the bucket.
 */
@Override
public Iterator<DataRecord> getAllRecords() {
    final AbstractSharedBackend owner = this;
    return new RecordsIterator<DataRecord>(
        new Function<S3ObjectSummary, DataRecord>() {
            @Override
            public DataRecord apply(S3ObjectSummary summary) {
                DataIdentifier id = new DataIdentifier(getIdentifierName(summary.getKey()));
                return new S3DataRecord(owner, s3service, bucket, id,
                        summary.getLastModified().getTime(), summary.getSize());
            }
        });
}
/**
 * Fetches the record for the given identifier via an S3 metadata HEAD request.
 *
 * @param identifier the record to look up
 * @return the record with its last-modified time and length
 * @throws DataStoreException if the object is missing or the request fails
 */
@Override
public DataRecord getRecord(DataIdentifier identifier) throws DataStoreException {
    final long started = System.currentTimeMillis();
    final String objectKey = getKeyName(identifier);
    ClassLoader previousLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        ObjectMetadata meta = s3service.getObjectMetadata(bucket, objectKey);
        S3DataRecord record = new S3DataRecord(this, s3service, bucket, identifier,
                meta.getLastModified().getTime(), meta.getContentLength());
        LOG.debug("Identifier [{}]'s getRecord = [{}] took [{}]ms.",
                identifier, record, (System.currentTimeMillis() - started));
        return record;
    } catch (AmazonServiceException e) {
        // 404/403 mean the record is absent (or not visible); everything is
        // surfaced to the caller as a DataStoreException either way.
        if (e.getStatusCode() == 404 || e.getStatusCode() == 403) {
            LOG.debug(
                    "getRecord:Identifier [{}] not found. Took [{}] ms.",
                    identifier, (System.currentTimeMillis() - started));
        }
        throw new DataStoreException(e);
    } finally {
        if (previousLoader != null) {
            Thread.currentThread().setContextClassLoader(previousLoader);
        }
    }
}
/**
 * Returns the shared reference key, creating and persisting it under the
 * META/ prefix on first use so all cluster nodes share the same key. The
 * key is cached in {@code secret} after the first successful resolution.
 *
 * @return the reference key bytes
 * @throws DataStoreException if the key cannot be read or created
 */
@Override
public byte[] getOrCreateReferenceKey() throws DataStoreException {
    try {
        if (secret != null && secret.length != 0) {
            return secret;
        } else {
            byte[] key;
            // Try reading from the metadata folder if it exists
            if (metadataRecordExists(REF_KEY)) {
                key = readMetadataBytes(REF_KEY);
            } else {
                // Create a new key and then retrieve it to use it
                key = super.getOrCreateReferenceKey();
                addMetadataRecord(new ByteArrayInputStream(key), REF_KEY);
                key = readMetadataBytes(REF_KEY);
            }
            secret = key;
            return secret;
        }
    } catch (IOException e) {
        // FIX: the original concatenated the exception into the message
        // ("..." + e), discarding the cause chain and stack trace; pass it
        // as the cause instead.
        throw new DataStoreException("Unable to get or create key", e);
    }
}
/**
 * Reads the full content of the named metadata record into a byte array.
 * (NOTE(review): getMetadataRecord can return null on lookup failure, which
 * would surface here as a NullPointerException — confirm callers only use
 * names known to exist.)
 *
 * @param name the metadata record name
 * @return the record content
 * @throws IOException if reading the stream fails
 * @throws DataStoreException if the record stream cannot be opened
 */
private byte[] readMetadataBytes(String name) throws IOException, DataStoreException {
    DataRecord record = getMetadataRecord(name);
    InputStream in = null;
    try {
        in = record.getStream();
        return IOUtils.toByteArray(in);
    } finally {
        // Quiet close: a close failure must not mask the read result/exception.
        IOUtils.closeQuietly(in);
    }
}
/**
 * Checks whether a metadata record with the given name exists.
 *
 * @param name the metadata record name
 * @return {@code true} if the corresponding META/ object exists
 */
@Override
public boolean metadataRecordExists(String name) {
    ClassLoader previousLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        return s3service.doesObjectExist(bucket, addMetaKeyPrefix(name));
    } finally {
        if (previousLoader != null) {
            Thread.currentThread().setContextClassLoader(previousLoader);
        }
    }
}
/**
 * Sets the validity period for presigned upload (PUT) URIs.
 *
 * @param seconds expiry in seconds; 0 or less disables presigned uploads
 */
void setHttpUploadURIExpirySeconds(int seconds) {
    this.httpUploadURIExpirySeconds = seconds;
}
/**
 * Generates a fresh, collision-safe identifier: a random UUID suffixed with
 * the current epoch-millisecond timestamp.
 *
 * @return a new random identifier
 */
private DataIdentifier generateSafeRandomIdentifier() {
    String idText = UUID.randomUUID().toString() + "-" + Instant.now().toEpochMilli();
    return new DataIdentifier(idText);
}
/**
 * Builds a presigned PUT URI for direct uploads of the given identifier, or
 * {@code null} when the presigned-upload feature is disabled (expiry &lt;= 0).
 *
 * @param identifier the record the client will upload
 * @return the presigned URI, or {@code null} if disabled
 */
private URI createPresignedPutURI(DataIdentifier identifier) {
    return (httpUploadURIExpirySeconds > 0)
            ? createPresignedURI(identifier, HttpMethod.PUT, httpUploadURIExpirySeconds)
            : null;
}
/**
 * Sets the validity period for presigned download (GET) URIs.
 *
 * @param seconds expiry in seconds; 0 or less disables presigned downloads
 */
void setHttpDownloadURIExpirySeconds(int seconds) {
    this.httpDownloadURIExpirySeconds = seconds;
}
/**
 * Configures the in-memory cache of presigned download URIs.
 * Cached entries expire after half the URI expiry period, so a cached URI is
 * always handed out with at least half of its validity remaining.
 *
 * @param maxSize maximum number of cached URIs; 0 or less disables the cache
 */
void setHttpDownloadURICacheSize(int maxSize) {
    // max size 0 or smaller is used to turn off the cache
    if (maxSize > 0) {
        LOG.info("presigned GET URI cache enabled, maxSize = {} items, expiry = {} seconds", maxSize, httpDownloadURIExpirySeconds / 2);
        httpDownloadURICache = CacheBuilder.newBuilder()
            .maximumSize(maxSize)
            // cache for half the expiry time of the URIs before giving out new ones
            .expireAfterWrite(httpDownloadURIExpirySeconds / 2, TimeUnit.SECONDS)
            .build();
    } else {
        LOG.info("presigned GET URI cache disabled");
        httpDownloadURICache = null;
    }
}
/**
 * Returns a presigned GET URI for downloading the blob directly from S3, or
 * null if the feature is disabled, the blob does not exist (when existence
 * verification is enabled), or URI creation fails. When a download URI cache
 * is configured, previously issued URIs are reused until they age out.
 *
 * @param identifier the blob to download; must not be null
 * @param downloadOptions content-type/disposition options applied to the response
 */
URI createHttpDownloadURI(@NotNull DataIdentifier identifier,
@NotNull DataRecordDownloadOptions downloadOptions) {
if (httpDownloadURIExpirySeconds <= 0) {
// feature disabled
return null;
}
// When running unit test from Maven, it doesn't always honor the @NotNull decorators
if (null == identifier) throw new NullPointerException("identifier");
if (null == downloadOptions) throw new NullPointerException("downloadOptions");
URI uri = null;
// if cache is enabled, check the cache
if (httpDownloadURICache != null) {
uri = httpDownloadURICache.getIfPresent(identifier);
}
if (null == uri) {
// Optionally verify the blob exists before handing out a URI.
if (presignedDownloadURIVerifyExists) {
try {
if (!exists(identifier)) {
LOG.warn("Cannot create download URI for nonexistent blob {}; returning null", getKeyName(identifier));
return null;
}
} catch (DataStoreException e) {
LOG.warn("Cannot create download URI for blob {} (caught DataStoreException); returning null", getKeyName(identifier), e);
return null;
}
}
// Ask S3 to serve cache-control / content headers with the response.
Map<String, String> requestParams = Maps.newHashMap();
requestParams.put("response-cache-control",
String.format("private, max-age=%d, immutable",
httpDownloadURIExpirySeconds)
);
String contentType = downloadOptions.getContentTypeHeader();
if (! Strings.isNullOrEmpty(contentType)) {
requestParams.put("response-content-type", contentType);
}
String contentDisposition =
downloadOptions.getContentDispositionHeader();
if (! Strings.isNullOrEmpty(contentDisposition)) {
requestParams.put("response-content-disposition",
contentDisposition);
}
uri = createPresignedURI(identifier,
HttpMethod.GET,
httpDownloadURIExpirySeconds,
requestParams);
// Cache the freshly generated URI for subsequent calls.
if (uri != null && httpDownloadURICache != null) {
httpDownloadURICache.put(identifier, uri);
}
}
return uri;
}
/**
 * Sets up a direct binary upload and returns a DataRecordUpload carrying a
 * signed upload token plus one or more presigned PUT URIs, or null if direct
 * uploads are disabled or the reference key cannot be obtained.
 *
 * A single presigned PUT URI is issued when the caller requested exactly one
 * URI or the whole upload fits in one minimum-size part; otherwise an S3
 * multi-part upload is initiated and one URI is issued per part.
 *
 * @param maxUploadSizeInBytes upper bound on the upload size; must be > 0
 * @param maxNumberOfURIs caller's URI budget: > 0, or -1 for "no limit"
 */
DataRecordUpload initiateHttpUpload(long maxUploadSizeInBytes, int maxNumberOfURIs) {
List<URI> uploadPartURIs = Lists.newArrayList();
long minPartSize = MIN_MULTIPART_UPLOAD_PART_SIZE;
long maxPartSize = MAX_MULTIPART_UPLOAD_PART_SIZE;
// Validate the requested size / URI-count combination up front.
if (0L >= maxUploadSizeInBytes) {
throw new IllegalArgumentException("maxUploadSizeInBytes must be > 0");
}
else if (0 == maxNumberOfURIs) {
throw new IllegalArgumentException("maxNumberOfURIs must either be > 0 or -1");
}
else if (-1 > maxNumberOfURIs) {
throw new IllegalArgumentException("maxNumberOfURIs must either be > 0 or -1");
}
else if (maxUploadSizeInBytes > MAX_SINGLE_PUT_UPLOAD_SIZE &&
maxNumberOfURIs == 1) {
throw new IllegalArgumentException(
String.format("Cannot do single-put upload with file size %d - exceeds max single-put upload size of %d",
maxUploadSizeInBytes,
MAX_SINGLE_PUT_UPLOAD_SIZE)
);
}
else if (maxUploadSizeInBytes > MAX_BINARY_UPLOAD_SIZE) {
throw new IllegalArgumentException(
String.format("Cannot do upload with file size %d - exceeds max upload size of %d",
maxUploadSizeInBytes,
MAX_BINARY_UPLOAD_SIZE)
);
}
DataIdentifier newIdentifier = generateSafeRandomIdentifier();
String blobId = getKeyName(newIdentifier);
String uploadId = null;
if (httpUploadURIExpirySeconds > 0) {
if (maxNumberOfURIs == 1 ||
maxUploadSizeInBytes <= minPartSize) {
// single put
uploadPartURIs.add(createPresignedPutURI(newIdentifier));
}
else {
// multi-part
InitiateMultipartUploadRequest req = new InitiateMultipartUploadRequest(bucket, blobId);
InitiateMultipartUploadResult res = s3service.initiateMultipartUpload(s3ReqDecorator.decorate(req));
uploadId = res.getUploadId();
long numParts;
if (maxNumberOfURIs > 1) {
// Honor the caller's URI budget, provided each resulting part
// stays within the maximum part size.
long requestedPartSize = (long) Math.ceil(((double) maxUploadSizeInBytes) / ((double) maxNumberOfURIs));
if (requestedPartSize <= maxPartSize) {
numParts = Math.min(
maxNumberOfURIs,
Math.min(
(long) Math.ceil(((double) maxUploadSizeInBytes) / ((double) minPartSize)),
MAX_ALLOWABLE_UPLOAD_URIS
)
);
} else {
throw new IllegalArgumentException(
String.format("Cannot do multi-part upload with requested part size %d", requestedPartSize)
);
}
}
else {
// maxNumberOfURIs == -1: derive the part count from the minimum part size.
long maximalNumParts = (long) Math.ceil(((double) maxUploadSizeInBytes) / ((double) MIN_MULTIPART_UPLOAD_PART_SIZE));
numParts = Math.min(maximalNumParts, MAX_ALLOWABLE_UPLOAD_URIS);
}
// One presigned URI per part; S3 part numbers are 1-based.
Map<String, String> presignedURIRequestParams = Maps.newHashMap();
for (long blockId = 1; blockId <= numParts; ++blockId) {
presignedURIRequestParams.put("partNumber", String.valueOf(blockId));
presignedURIRequestParams.put("uploadId", uploadId);
uploadPartURIs.add(createPresignedURI(newIdentifier,
HttpMethod.PUT,
httpUploadURIExpirySeconds,
presignedURIRequestParams));
}
}
try {
// Sign the upload token with the reference key so that
// completeHttpUpload can verify it later.
byte[] secret = getOrCreateReferenceKey();
String uploadToken = new DataRecordUploadToken(blobId, uploadId).getEncodedToken(secret);
return new DataRecordUpload() {
@Override
@NotNull
public String getUploadToken() {
return uploadToken;
}
@Override
public long getMinPartSize() {
return minPartSize;
}
@Override
public long getMaxPartSize() {
return maxPartSize;
}
@Override
@NotNull
public Collection<URI> getUploadURIs() {
return uploadPartURIs;
}
};
} catch (DataStoreException e) {
LOG.warn("Unable to obtain data store key");
}
}
return null;
}
/**
 * Completes a direct binary upload previously set up by initiateHttpUpload.
 * Decodes and verifies the upload token; for multi-part uploads the uploaded
 * parts are assembled via CompleteMultipartUpload.
 *
 * @param uploadTokenStr the encoded token returned by initiateHttpUpload
 * @return the finalized DataRecord
 * @throws DataRecordUploadException if the token carries no upload ID but the
 *         record does not already exist
 * @throws DataStoreException if the reference key cannot be obtained
 */
DataRecord completeHttpUpload(@NotNull String uploadTokenStr)
throws DataRecordUploadException, DataStoreException {
if (Strings.isNullOrEmpty(uploadTokenStr)) {
throw new IllegalArgumentException("uploadToken required");
}
// Validate the token against the reference key it was signed with.
DataRecordUploadToken uploadToken = DataRecordUploadToken.fromEncodedToken(uploadTokenStr, getOrCreateReferenceKey());
String key = uploadToken.getBlobId();
DataIdentifier blobId = new DataIdentifier(getIdentifierName(key));
DataRecord record = null;
try {
record = getRecord(blobId);
// If this succeeds this means either it was a "single put" upload
// (we don't need to do anything in this case - blob is already uploaded)
// or it was completed before with the same token.
}
catch (DataStoreException e) {
// record doesn't exist - so this means we are safe to do the complete request
if (uploadToken.getUploadId().isPresent()) {
// An existing upload ID means this is a multi-part upload
String uploadId = uploadToken.getUploadId().get();
ListPartsRequest listPartsRequest = new ListPartsRequest(bucket, key, uploadId);
PartListing listing = s3service.listParts(listPartsRequest);
List<PartETag> eTags = Lists.newArrayList();
long size = 0L;
Date lastModified = null;
// Total size is the sum of the parts; lastModified is the newest part's timestamp.
for (PartSummary partSummary : listing.getParts()) {
PartETag eTag = new PartETag(partSummary.getPartNumber(), partSummary.getETag());
eTags.add(eTag);
size += partSummary.getSize();
if (null == lastModified || partSummary.getLastModified().after(lastModified)) {
lastModified = partSummary.getLastModified();
}
}
CompleteMultipartUploadRequest completeReq = new CompleteMultipartUploadRequest(
bucket,
key,
uploadId,
eTags
);
s3service.completeMultipartUpload(completeReq);
record = new S3DataRecord(
this,
s3service,
bucket,
blobId,
lastModified.getTime(),
size
);
}
else {
// Something is wrong - upload ID missing from upload token
// but record doesn't exist already, so this is invalid
throw new DataRecordUploadException(
String.format("Unable to finalize direct write of binary %s - upload ID missing from upload token",
blobId)
);
}
}
return record;
}
// Convenience overload: presigned URI with no extra request parameters.
private URI createPresignedURI(DataIdentifier identifier,
HttpMethod method,
int expirySeconds) {
return createPresignedURI(identifier, method, expirySeconds, Maps.newHashMap());
}
/**
 * Generates a presigned S3 URI for the given identifier, or null on failure.
 * Non-GET requests are passed through the request decorator before signing.
 *
 * @param identifier the blob the URI refers to
 * @param method HTTP method the URI authorizes
 * @param expirySeconds how long the URI remains valid
 * @param reqParams extra request parameters to sign into the URI
 */
private URI createPresignedURI(DataIdentifier identifier,
                               HttpMethod method,
                               int expirySeconds,
                               Map<String, String> reqParams) {
    final String key = getKeyName(identifier);
    try {
        final Date expiration = new Date();
        // Multiply as long: "expirySeconds * 1000" would overflow int for
        // expiries beyond ~24.8 days, producing an already-expired URI.
        expiration.setTime(expiration.getTime() + expirySeconds * 1000L);
        GeneratePresignedUrlRequest request = new GeneratePresignedUrlRequest(bucket, key)
                .withMethod(method)
                .withExpiration(expiration);
        if (method != HttpMethod.GET) {
            // Only write requests are decorated (e.g. to add SSE headers).
            request = s3ReqDecorator.decorate(request);
        }
        for (Map.Entry<String, String> e : reqParams.entrySet()) {
            request.addRequestParameter(e.getKey(), e.getValue());
        }
        URI uri = null;
        URL presignedURL = null;
        try {
            presignedURL = s3PresignService.generatePresignedUrl(request);
            uri = presignedURL.toURI();
            LOG.debug("Presigned {} URI for key {}: {}", method.name(), key, uri.toString());
        }
        catch (URISyntaxException e) {
            // Pass the exception as the last argument so the log carries its stack trace.
            LOG.error("AWS request to create presigned S3 URI failed - could not convert '{}' to URI",
                    (null != presignedURL ? presignedURL.toString() : ""), e
            );
        }
        return uri;
    } catch (AmazonServiceException e) {
        LOG.error("AWS request to create presigned S3 {} URI failed. " +
                "Key: {}, Error: {}, HTTP Code: {}, AWS Error Code: {}, Error Type: {}, Request ID: {}",
                method.name(), key, e.getMessage(), e.getStatusCode(), e.getErrorCode(), e.getErrorType(), e.getRequestId());
        return null;
    }
}
/**
 * Iterator over the S3 objects in the bucket, applying {@code transformer}
 * to each object summary. Internal metadata objects (keys starting with
 * META_KEY_PREFIX) are filtered out. Listing pages are fetched lazily.
 * @param <T> the element type produced by the transformer
 */
class RecordsIterator<T> extends AbstractIterator<T> {
// Most recently fetched listing page; null until the first fetch.
ObjectListing prevObjectListing;
// Summaries from the current page, consumed one at a time.
Queue<S3ObjectSummary> queue;
// Running count of (filtered) summaries seen so far.
long size;
Function<S3ObjectSummary, T> transformer;
public RecordsIterator (Function<S3ObjectSummary, T> transformer) {
queue = Lists.newLinkedList();
this.transformer = transformer;
}
@Override
protected T computeNext() {
if (queue.isEmpty()) {
loadBatch();
}
// A page may contain only metadata objects; keep paging while more exist.
while (queue.isEmpty() && prevObjectListing.getNextMarker() != null) {
LOG.debug("Queue is empty, but there is more data in the S3 bucket");
loadBatch();
}
if (!queue.isEmpty()) {
return transformer.apply(queue.remove());
}
return endOfData();
}
// Fetches the next listing page into the queue. Returns true only when
// non-metadata summaries were added.
private boolean loadBatch() {
ClassLoader contextClassLoader = currentThread().getContextClassLoader();
long start = System.currentTimeMillis();
try {
// Swap in this class's loader while talking to AWS; restored in finally.
currentThread().setContextClassLoader(getClass().getClassLoader());
// initialize the listing the first time
if (prevObjectListing == null) {
ListObjectsRequest listReq = new ListObjectsRequest();
listReq.withBucketName(bucket);
if (properties.containsKey(S3Constants.MAX_KEYS)) {
listReq.setMaxKeys(Integer.valueOf(properties.getProperty(S3Constants.MAX_KEYS)));
}
prevObjectListing = s3service.listObjects(listReq);
} else if (prevObjectListing.isTruncated()) { //already initialized more objects available
prevObjectListing = s3service.listNextBatchOfObjects(prevObjectListing);
} else { // no more available
return false;
}
// Hide internal metadata records from callers.
List<S3ObjectSummary> listing = Lists.newArrayList(
filter(prevObjectListing.getObjectSummaries(),
new Predicate<S3ObjectSummary>() {
@Override
public boolean apply(S3ObjectSummary input) {
return !input.getKey().startsWith(META_KEY_PREFIX);
}
}));
// After filtering no elements
if (listing.isEmpty()) {
return false;
}
size += listing.size();
queue.addAll(listing);
LOG.info("Loaded batch of size [{}] in [{}] ms.",
listing.size(), (System.currentTimeMillis() - start));
return true;
} catch (AmazonServiceException e) {
LOG.warn("Could not list objects", e);
} finally {
if (contextClassLoader != null) {
currentThread().setContextClassLoader(contextClassLoader);
}
}
return false;
}
}
// Prefixes a record name with the marker used for internal metadata objects.
private static String addMetaKeyPrefix(String key) {
    return META_KEY_PREFIX.concat(key);
}
// Removes the metadata marker from an object key; names without the
// marker are returned unchanged.
private static String stripMetaKeyPrefix(String name) {
    return name.startsWith(META_KEY_PREFIX)
            ? name.substring(META_KEY_PREFIX.length())
            : name;
}
/**
 * S3DataRecord which lazily retrieves the input stream of the record.
 */
static class S3DataRecord extends AbstractDataRecord {
private AmazonS3Client s3service;
// Record length in bytes, captured at construction time.
private long length;
private long lastModified;
private String bucket;
// True when this record is an internal metadata record (stored under META_KEY_PREFIX).
private boolean isMeta;
public S3DataRecord(AbstractSharedBackend backend, AmazonS3Client s3service, String bucket,
DataIdentifier key, long lastModified,
long length) {
this(backend, s3service, bucket, key, lastModified, length, false);
}
public S3DataRecord(AbstractSharedBackend backend, AmazonS3Client s3service, String bucket,
DataIdentifier key, long lastModified,
long length, boolean isMeta) {
super(backend, key);
this.s3service = s3service;
this.lastModified = lastModified;
this.length = length;
this.bucket = bucket;
this.isMeta = isMeta;
}
@Override
public long getLength() throws DataStoreException {
return length;
}
// Opens the S3 object's content stream on demand; the caller must close it.
@Override
public InputStream getStream() throws DataStoreException {
String id = getKeyName(getIdentifier());
if (isMeta) {
id = addMetaKeyPrefix(getIdentifier().toString());
}
else {
// Don't worry about stream logging for metadata records
if (LOG_STREAMS_DOWNLOAD.isDebugEnabled()) {
// Log message, with exception so we can get a trace to see where the call came from
LOG_STREAMS_DOWNLOAD.debug("Binary downloaded from S3 - identifier={}", id, new Exception());
}
}
return s3service.getObject(bucket, id).getObjectContent();
}
@Override
public long getLastModified() {
return lastModified;
}
@Override
public String toString() {
return "S3DataRecord{" +
"identifier=" + getIdentifier() +
", length=" + length +
", lastModified=" + lastModified +
", bucket='" + bucket + '\'' +
'}';
}
}
/**
 * Renames object keys in S3 concurrently. The number of concurrent threads
 * is defined by the 'maxConnections' property in aws.properties. As S3 has
 * no "move" command, a move is simulated by copying each object to its new
 * key and then deleting the old key afterwards.
 */
private void renameKeys() throws DataStoreException {
    long startTime = System.currentTimeMillis();
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    long count = 0;
    try {
        // Swap in this class's loader while talking to AWS; restored in finally.
        Thread.currentThread().setContextClassLoader(
                getClass().getClassLoader());
        ObjectListing prevObjectListing = s3service.listObjects(bucket);
        List<DeleteObjectsRequest.KeyVersion> deleteList = new ArrayList<DeleteObjectsRequest.KeyVersion>();
        int nThreads = Integer.parseInt(properties.getProperty("maxConnections"));
        ExecutorService executor = Executors.newFixedThreadPool(nThreads,
                new NamedThreadFactory("s3-object-rename-worker"));
        boolean taskAdded = false;
        // Walk every page of the bucket listing, scheduling one rename task per key.
        while (true) {
            for (S3ObjectSummary s3ObjSumm : prevObjectListing.getObjectSummaries()) {
                executor.execute(new KeyRenameThread(s3ObjSumm.getKey()));
                taskAdded = true;
                count++;
                // delete the object if it follows old key name format
                if (s3ObjSumm.getKey().startsWith(KEY_PREFIX)) {
                    deleteList.add(new DeleteObjectsRequest.KeyVersion(
                            s3ObjSumm.getKey()));
                }
            }
            if (!prevObjectListing.isTruncated()) break;
            prevObjectListing = s3service.listNextBatchOfObjects(prevObjectListing);
        }
        // This will make the executor accept no new threads
        // and finish all existing threads in the queue
        executor.shutdown();
        try {
            // Wait until all tasks are finished
            while (taskAdded
                    && !executor.awaitTermination(10, TimeUnit.SECONDS)) {
                LOG.info("Rename S3 keys tasks timedout. Waiting again");
            }
        } catch (InterruptedException ie) {
            // Restore the interrupt flag instead of silently swallowing it.
            Thread.currentThread().interrupt();
        }
        LOG.info("Renamed [{}] keys, time taken [{}]sec", count,
                ((System.currentTimeMillis() - startTime) / 1000));
        // Delete old-format keys in batches of at most 500 entries.
        if (deleteList.size() > 0) {
            DeleteObjectsRequest delObjsReq = new DeleteObjectsRequest(
                    bucket);
            int batchSize = 500, startIndex = 0, size = deleteList.size();
            int endIndex = Math.min(batchSize, size);
            while (endIndex <= size) {
                delObjsReq.setKeys(Collections.unmodifiableList(deleteList.subList(
                        startIndex, endIndex)));
                DeleteObjectsResult dobjs = s3service.deleteObjects(delObjsReq);
                LOG.info("Records[{}] deleted in datastore from index [{}] to [{}]",
                        dobjs.getDeletedObjects().size(), startIndex, (endIndex - 1));
                if (endIndex == size) {
                    break;
                } else {
                    startIndex = endIndex;
                    endIndex = Math.min(startIndex + batchSize, size);
                }
            }
        }
    } finally {
        if (contextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }
}
/**
 * Converts a key from the old format to the new one. For example, the old
 * key dataStore_004cb70c8f87d78f04da41e7547cb434094089ea becomes
 * 004c-b70c8f87d78f04da41e7547cb434094089ea.
 */
private static String convertKey(String oldKey)
        throws IllegalArgumentException {
    if (!oldKey.startsWith(KEY_PREFIX)) {
        return oldKey;
    }
    String stripped = oldKey.substring(KEY_PREFIX.length());
    return new StringBuilder(stripped.substring(0, 4))
            .append(Utils.DASH)
            .append(stripped.substring(4))
            .toString();
}
/**
 * Gets the S3 object key for a data identifier: the first four characters,
 * a dash, then the remainder.
 */
private static String getKeyName(DataIdentifier identifier) {
    String id = identifier.toString();
    return new StringBuilder(id.substring(0, 4))
            .append(Utils.DASH)
            .append(id.substring(4))
            .toString();
}
/**
 * Gets the data identifier name for an S3 object key. Returns null for keys
 * containing no dash; metadata keys are returned unchanged; otherwise the
 * dash inserted by getKeyName is removed.
 */
private static String getIdentifierName(String key) {
    if (!key.contains(Utils.DASH)) {
        return null;
    }
    if (key.contains(META_KEY_PREFIX)) {
        return key;
    }
    // Drop the dash at position 4 (the inverse of getKeyName).
    return key.substring(0, 4) + key.substring(5);
}
/**
* The class renames object key in S3 in a thread.
*/
private class KeyRenameThread implements Runnable {
private String oldKey;
public void run() {
ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(
getClass().getClassLoader());
String newS3Key = convertKey(oldKey);
CopyObjectRequest copReq = new CopyObjectRequest(bucket,
oldKey, bucket, newS3Key);
Copy copy = tmx.copy(s3ReqDecorator.decorate(copReq));
try {
copy.waitForCopyResult();
LOG.debug("[{}] renamed to [{}] ", oldKey, newS3Key);
} catch (InterruptedException ie) {
LOG.error(" Exception in renaming [{}] to [{}] ", ie, oldKey, newS3Key);
}
} finally {
if (contextClassLoader != null) {
Thread.currentThread().setContextClassLoader(
contextClassLoader);
}
}
}
public KeyRenameThread(String oldKey) {
this.oldKey = oldKey;
}
}
}
| |
/*
* Copyright 2009 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
/**
 * Tests OptimizeReturns: verifies that return values of functions whose
 * results are never used are removed, and that functions whose results are
 * (or may be) used are left alone.
 * @author johnlenz@google.com (John Lenz)
 */
public final class OptimizeReturnsTest extends CompilerTestCase {
@Override
protected CompilerPass getProcessor(Compiler compiler) {
return new OptimizeReturns(compiler);
}
// Externs made visible to every test case.
private static final String EXTERNAL_SYMBOLS =
"var extern;extern.externalMethod";
public OptimizeReturnsTest() {
super(EXTERNAL_SYMBOLS);
}
@Override
protected int getNumRepetitions() {
// run pass once.
return 1;
}
@Override
protected void setUp() throws Exception {
super.setUp();
super.enableLineNumberCheck(true);
disableTypeCheck();
}
/**
 * Combine source strings using '\n' as the separator.
 */
private static String newlineJoin(String ... parts) {
return LINE_JOINER.join(parts);
}
// Results consumed by a caller must not be removed.
public void testNoRewriteUsedResult1() throws Exception {
String source = newlineJoin(
"function a(){return 1}",
"var x = a()");
testSame(source);
}
public void testNoRewriteUsedResult2() throws Exception {
String source = newlineJoin(
"var a = function(){return 1}",
"a(); var b = a()");
testSame(source);
}
// Unused results are rewritten to bare "return" statements.
public void testRewriteUnusedResult1() throws Exception {
String source = newlineJoin(
"function a(){return 1}",
"a()");
String expected = newlineJoin(
"function a(){return}",
"a()");
test(source, expected);
}
public void testRewriteUnusedResult2() throws Exception {
String source = newlineJoin(
"var a; a = function(){return 1}",
"a()");
String expected = newlineJoin(
"var a; a = function(){return}",
"a()");
test(source, expected);
}
public void testRewriteUnusedResult3() throws Exception {
String source = newlineJoin(
"var a = function(){return 1}",
"a()");
String expected = newlineJoin(
"var a = function(){return}",
"a()");
test(source, expected);
}
// Recursive/self-referential results are left unchanged.
public void testRewriteUnusedResult4a() throws Exception {
String source = newlineJoin(
"var a = function(){return a()}",
"a()");
testSame(source);
}
public void testRewriteUnusedResult4b() throws Exception {
String source = newlineJoin(
"var a = function b(){return b()}",
"a()");
testSame(source);
}
public void testRewriteUnusedResult4c() throws Exception {
String source = newlineJoin(
"function a(){return a()}",
"a()");
testSame(source);
}
public void testRewriteUnusedResult5() throws Exception {
String source = newlineJoin(
"function a(){}",
"a.prototype.foo = function(args) {return args};",
"var o = new a;",
"o.foo()");
String expected = newlineJoin(
"function a(){}",
"a.prototype.foo = function(args) {return};",
"var o = new a;",
"o.foo()");
test(source, expected);
}
// Side effects in the returned expression are preserved as statements.
public void testRewriteUnusedResult6() throws Exception {
String source = newlineJoin(
"function a(){return (g = 1)}",
"a()");
String expected = newlineJoin(
"function a(){g = 1;return}",
"a()");
test(source, expected);
}
public void testRewriteUnusedResult7a() throws Exception {
String source = newlineJoin(
"function a() { return 1 }",
"function b() { return a() }",
"function c() { return b() }",
"c();");
String expected = newlineJoin(
"function a() { return 1 }",
"function b() { return a() }",
"function c() { b(); return }",
"c();");
test(source, expected);
}
// The rewrite propagates one call level per pass iteration.
public void testRewriteUnusedResult7b() throws Exception {
String source = newlineJoin(
"c();",
"function c() { return b() }",
"function b() { return a() }",
"function a() { return 1 }");
// Iteration 1.
String expected = newlineJoin(
"c();",
"function c() { b(); return }",
"function b() { return a() }",
"function a() { return 1 }");
test(source, expected);
// Iteration 2.
source = expected;
expected = newlineJoin(
"c();",
"function c() { b(); return }",
"function b() { a(); return }",
"function a() { return 1 }");
test(source, expected);
// Iteration 3.
source = expected;
expected = newlineJoin(
"c();",
"function c() { b(); return }",
"function b() { a(); return }",
"function a() { return }");
test(source, expected);
}
// A call cycle must not be rewritten.
public void testRewriteUnusedResult8() throws Exception {
String source = newlineJoin(
"function a() { return c() }",
"function b() { return a() }",
"function c() { return b() }",
"c();");
testSame(source);
}
// Functions reachable through aliased object/array access are not rewritten.
public void testNoRewriteObjLit1() throws Exception {
String source = newlineJoin(
"var a = {b:function(){return 1;}}",
"for(c in a) (a[c])();",
"a.b()");
testSame(source);
}
public void testNoRewriteObjLit2() throws Exception {
String source = newlineJoin(
"var a = {b:function fn(){return 1;}}",
"for(c in a) (a[c])();",
"a.b()");
testSame(source);
}
public void testNoRewriteArrLit() throws Exception {
String source = newlineJoin(
"var a = [function(){return 1;}]",
"(a[0])();");
testSame(source);
}
public void testPrototypeMethod1() throws Exception {
String source = newlineJoin(
"function c(){}",
"c.prototype.a = function(){return 1}",
"var x = new c;",
"x.a()");
String result = newlineJoin(
"function c(){}",
"c.prototype.a = function(){return}",
"var x = new c;",
"x.a()");
test(source, result);
}
// goog.reflect.object makes property names reflective; no rewrite allowed.
public void testPrototypeMethod2() throws Exception {
String source = newlineJoin(
"function c(){}",
"c.prototype.a = function(){return 1}",
"goog.reflect.object({a: 'v'})",
"var x = new c;",
"x.a()");
testSame(source);
}
public void testPrototypeMethod3() throws Exception {
String source = newlineJoin(
"function c(){}",
"c.prototype.a = function(){return 1}",
"var x = new c;",
"for(var key in goog.reflect.object({a: 'v'})){ x[key](); }",
"x.a()");
testSame(source);
}
public void testPrototypeMethod4() throws Exception {
String source = newlineJoin(
"function c(){}",
"c.prototype.a = function(){return 1}",
"var x = new c;",
"for(var key in goog.reflect.object({a: 'v'})){ x[key](); }");
testSame(source);
}
public void testCallOrApply() throws Exception {
// TODO(johnlenz): Add support for .call and .apply
testSame("function a() {return 1}; a.call(new foo);");
testSame("function a() {return 1}; a.apply(new foo);");
}
public void testRewriteUseSiteRemoval() throws Exception {
String source = newlineJoin(
"function a() { return {\"_id\" : 1} }",
"a();");
String expected = newlineJoin(
"function a() { return }",
"a();");
test(source, expected);
}
}
| |
/*
* Copyright 2013 The Kava Project Developers. See the COPYRIGHT file at the top-level directory of this distribution
* and at http://kavaproject.org/COPYRIGHT.
*
* Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the
* MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option. This file may not be copied,
* modified, or distributed except according to those terms.
*/
package org.kavaproject.kavatouch.foundation;
import org.kavaproject.kavatouch.corefoundation.CorePropertyList;
import org.kavaproject.kavatouch.corefoundation.CoreString;
import org.kavaproject.kavatouch.coregraphics.GraphicsAffineTransform;
import org.kavaproject.kavatouch.coregraphics.GraphicsPoint;
import org.kavaproject.kavatouch.coregraphics.GraphicsRect;
import org.kavaproject.kavatouch.coregraphics.GraphicsSize;
import org.kavaproject.kavatouch.foundation.staging.*;
import org.kavaproject.kavatouch.internal.CFunction;
import org.kavaproject.kavatouch.internal.Header;
import org.kavaproject.kavatouch.internal.OccClass;
import org.kavaproject.kavatouch.internal.OccInstanceMethod;
import org.kavaproject.kavatouch.runtime.Creatable;
import org.kavaproject.kavatouch.runtime.Runnable2;
import org.kavaproject.kavatouch.runtime.Runnable4;
import org.kavaproject.kavatouch.uikit.LineBreakMode;
import org.kavaproject.kavatouch.uikit.UIBaselineAdjustment;
import org.kavaproject.kavatouch.uikit.UIFont;
import org.kavaproject.kavatouch.util.OutArg;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Locale;
import java.util.Map;
@Header("NSString")
@OccClass("NSString")
public interface FoundationString extends Coding, Copying, MutableCopying, Comparable<FoundationString>, Creatable,
CoreBridge<CoreString> {
@OccInstanceMethod("writeToFile:atomically:encoding:error:")
boolean writeToFile(String path, boolean atomically, StringEncoding encoding) throws RuntimeException;
@OccInstanceMethod("writeToURL:atomically:encoding:error:")
boolean writeToURL(URL url, boolean atomically, StringEncoding encoding) throws RuntimeException;
@OccInstanceMethod("writeToFile:atomically:")
@Deprecated
boolean writeToFile(String path, boolean atomically);
@OccInstanceMethod("writeToURL:atomically:")
@Deprecated
boolean writeToURL(URL url, boolean atomically);
@OccInstanceMethod("lengthOfBytesUsingEncoding:")
int lengthOfBytes(StringEncoding encoding);
@OccInstanceMethod("maximumLengthOfBytesUsingEncoding:")
int maximumLengthOfBytes(StringEncoding encoding);
@OccInstanceMethod("characterAtIndex:")
char at(int index);
@OccInstanceMethod("getCharacters:range:")
void getCharacters(OutArg<char[]> buffer, Range range);
@OccInstanceMethod("getBytes:maxLength:usedLength:encoding:options:range:remainingRange:")
boolean getBytes(OutArg<byte[]> buffer, int maxLength, OutArg<Integer> usedLength, StringEncoding encoding,
StringEncodingConversionOptions options, Range range, RangePointer remainingRange);
@OccInstanceMethod("getCharacters:")
@Deprecated
void getCharacters(OutArg<char[]> buffer);
@OccInstanceMethod("cStringUsingEncoding:")
byte[] toCString(StringEncoding encoding);
@OccInstanceMethod("getCString:maxLength:encoding:")
boolean getCString(OutArg<byte[]> buffer, int maxLength, StringEncoding encoding);
@OccInstanceMethod("UTF8String")
byte[] toUTF8String();
@OccInstanceMethod("cString")
@Deprecated
byte[] toCString();
@OccInstanceMethod("cStringLength")
@Deprecated
int cStringLength();
@OccInstanceMethod("getCString:")
@Deprecated
void getCString(OutArg<byte[]> buffer);
@OccInstanceMethod("getCString:maxLength:")
@Deprecated
void getCString(OutArg<byte[]> buffer, int maxLength);
@OccInstanceMethod("getCString:maxLength:range:remainingRange:")
@Deprecated
void getCString(OutArg<byte[]> buffer, int maxLength, Range range, Range remainingRange);
@OccInstanceMethod("lossyCString")
@Deprecated
byte[] lossyCString();
@OccInstanceMethod("stringByAppendingFormat:")
FoundationString appendFormat(String format, Object... args);
FoundationString stringByAppendingString(FoundationString instance);
@OccInstanceMethod("stringByAppendingString:")
FoundationString append(String s);
@OccInstanceMethod("componentsSeparatedByString:")
List<FoundationString> components(String separator);
@OccInstanceMethod("componentsSeparatedByCharactersInSet:")
List<FoundationString> components(CharacterSet separators);
@OccInstanceMethod("stringByTrimmingCharactersInSet:")
FoundationString trim(CharacterSet characterSet);
@OccInstanceMethod("substringFromIndex:")
FoundationString substringFromIndex(int index);
@OccInstanceMethod("substringWithRange:")
FoundationString substringWithRange(Range range);
@OccInstanceMethod("substringToIndex:")
FoundationString substringToIndex(int index);
@OccInstanceMethod("rangeOfCharacterFromSet:")
Range range(CharacterSet characterSet);
@OccInstanceMethod("rangeOfCharacterFromSet:options:")
Range range(CharacterSet characterSet, StringCompareOptions options);
@OccInstanceMethod("rangeOfCharacterFromSet:options:range:")
Range range(CharacterSet characterSet, StringCompareOptions options, Range range);
@OccInstanceMethod("rangeOfString:")
Range range(String string);
@OccInstanceMethod("rangeOfString:options:")
Range range(String string, StringCompareOptions options);
@OccInstanceMethod("rangeOfString:options:range:")
Range range(String string, StringCompareOptions options, Range range);
@OccInstanceMethod("rangeOfString:options:range:locale:")
Range range(String string, StringCompareOptions options, Range range, Locale locale);
@OccInstanceMethod("enumerateLinesUsingBlock:")
void enumerateLines(Runnable2<String, Boolean> block);
@OccInstanceMethod("enumerateSubstringsInRange:options:usingBlock:")
void enumerateSubstrings(Range range, StringEnumerationOptions options, Runnable4<String, Range, Range,
Boolean> block);
// ===== Substring replacement (bridged NSString "stringByReplacing..." selectors) =====
@OccInstanceMethod("stringByReplacingOccurrencesOfString:withString:")
FoundationString replace(String old, String replacement);
@OccInstanceMethod("stringByReplacingOccurrencesOfString:withString:options:range:")
FoundationString replace(String target, String replacement, StringCompareOptions options, Range searchRange);
// Replaces the characters in 'range' (rather than occurrences of a search string).
@OccInstanceMethod("stringByReplacingCharactersInRange:withString:")
FoundationString replace(Range range, String replacement);
// ===== Line / paragraph boundary lookup; indexes come back through OutArg out-parameters =====
@OccInstanceMethod("getLineStart:end:contentsEnd:forRange:")
void getLine(OutArg<Integer> startIndex, OutArg<Integer> endIndex, OutArg<Integer> contentsEndIndex, Range range);
@OccInstanceMethod("lineRangeForRange:")
Range lineRange(Range range);
@OccInstanceMethod("getParagraphStart:end:contentsEnd:forRange:")
void getParagraph(OutArg<Integer> startIndex, OutArg<Integer> endIndex, OutArg<Integer> contentsEndIndex,
Range range);
@OccInstanceMethod("paragraphRangeForRange:")
Range paragraphRange(Range range);
// ===== Composed character sequences (user-perceived characters) =====
@OccInstanceMethod("rangeOfComposedCharacterSequenceAtIndex:")
Range rangeOfComposedCharacterSequence(int index);
@OccInstanceMethod("rangeOfComposedCharacterSequencesForRange:")
Range rangeOfComposedCharacterSequences(Range range);
// ===== Property-list parsing =====
@OccInstanceMethod("propertyList")
CorePropertyList propertyList();
@OccInstanceMethod("propertyListFromStringsFileFormat")
Map<String, String> propertyListFromStringsFileFormat();
// ===== Comparison.
// NOTE(review): the int results presumably follow NSComparisonResult ordering
// (negative / zero / positive) - confirm against the bridging layer.
@OccInstanceMethod("caseInsensitiveCompare:")
int caseInsensitiveCompare(String other);
@OccInstanceMethod("localizedCaseInsensitiveCompare:")
int localizedCaseInsensitiveCompare(String other);
// Bridged to "compare:"; also overrides a supertype method (Comparable-style) -
// the supertype declaration is outside this view.
@OccInstanceMethod("compare:")
@Override
int compareTo(FoundationString other);
@OccInstanceMethod("localizedCompare:")
int localizedCompare(String other);
@OccInstanceMethod("compare:options:")
int compareTo(String other, StringCompareOptions options);
@OccInstanceMethod("compare:options:range:")
int compareTo(String other, StringCompareOptions options, Range range);
@OccInstanceMethod("compare:options:range:locale:")
int compareTo(String other, StringCompareOptions options, Range range, Locale locale);
// Finder-like ordering ("localizedStandardCompare:").
@OccInstanceMethod("localizedStandardCompare:")
int localizedStandardCompare(String other);
// ===== Prefix / suffix tests and locale-aware case mapping =====
@OccInstanceMethod("hasPrefix:")
boolean hasPrefix(String string);
@OccInstanceMethod("hasSuffix:")
boolean hasSuffix(String string);
@OccInstanceMethod("stringByFoldingWithOptions:locale:")
FoundationString fold(StringCompareOptions options, Locale locale);
@OccInstanceMethod("commonPrefixWithString:options:")
FoundationString commonPrefix(String string, StringCompareOptions options);
@OccInstanceMethod("capitalizedString")
FoundationString capitalize();
@OccInstanceMethod("capitalizedStringWithLocale:")
FoundationString capitalize(Locale locale);
@OccInstanceMethod("lowercaseString")
FoundationString toLower();
@OccInstanceMethod("lowercaseStringWithLocale:")
FoundationString toLower(Locale locale);
@OccInstanceMethod("uppercaseString")
FoundationString toUpper();
@OccInstanceMethod("uppercaseStringWithLocale:")
FoundationString toUpper(Locale locale);
// ===== Unicode normalization (canonical = NFD/NFC, compatibility = NFKD/NFKC per the selectors) =====
@OccInstanceMethod("decomposedStringWithCanonicalMapping")
FoundationString decomposedStringWithCanonicalMapping();
@OccInstanceMethod("decomposedStringWithCompatibilityMapping")
FoundationString decomposedStringWithCompatibilityMapping();
@OccInstanceMethod("precomposedStringWithCanonicalMapping")
FoundationString precomposedStringWithCanonicalMapping();
@OccInstanceMethod("precomposedStringWithCompatibilityMapping")
FoundationString precomposedStringWithCompatibilityMapping();
// ===== Numeric interpretation of the string's contents =====
@OccInstanceMethod("doubleValue")
double doubleValue();
@OccInstanceMethod("floatValue")
float floatValue();
@OccInstanceMethod("intValue")
int intValue();
@OccInstanceMethod("integerValue")
Integer integerValue();
@OccInstanceMethod("longLongValue")
long longLongValue();
@OccInstanceMethod("boolValue")
boolean boolValue();
// ===== Encoding conversion =====
@OccInstanceMethod("canBeConvertedToEncoding:")
boolean canBeConvertedToEncoding(StringEncoding encoding);
@OccInstanceMethod("dataUsingEncoding:")
ByteBuffer dataUsingEncoding(StringEncoding encoding);
@OccInstanceMethod("dataUsingEncoding:allowLossyConversion:")
ByteBuffer dataUsingEncoding(StringEncoding encoding, boolean allowLossyConversion);
@OccInstanceMethod("fastestEncoding")
StringEncoding fastestEncoding();
@OccInstanceMethod("smallestEncoding")
StringEncoding smallestEncoding();
// ===== Path completion / filesystem representation =====
@OccInstanceMethod("completePathIntoString:caseSensitive:matchesIntoArray:filterTypes:")
int completePath(OutArg<String> outputName, boolean caseSensitive, List<String> outputArray,
List<FoundationString> filterTypes);
@OccInstanceMethod("fileSystemRepresentation")
FoundationString fileSystemRepresentation();
@OccInstanceMethod("getFileSystemRepresentation:maxLength:")
boolean getFileSystemRepresentation(char[] buffer, int maxLength);
@OccInstanceMethod("isAbsolutePath")
boolean isAbsolutePath();
// ===== Path component manipulation =====
@OccInstanceMethod("lastPathComponent")
FoundationString lastPathComponent();
@OccInstanceMethod("pathComponents")
List<FoundationString> pathComponents();
@OccInstanceMethod("stringByAbbreviatingWithTildeInPath")
FoundationString abbreviateWithTildeInPath();
// NOTE(review): this overload carries no @OccInstanceMethod annotation, unlike
// its String-parameter sibling just below - confirm whether that is intentional.
FoundationString appendPathComponent(FoundationString name);
@OccInstanceMethod("stringByAppendingPathComponent:")
FoundationString appendPathComponent(String pathComponent);
// NOTE(review): also un-annotated - see appendPathComponent(FoundationString) above.
FoundationString appendPathExtension(FoundationString extension);
@OccInstanceMethod("stringByAppendingPathExtension:")
FoundationString appendPathExtension(String extension);
@OccInstanceMethod("stringByDeletingLastPathComponent")
FoundationString deleteLastPathComponent();
@OccInstanceMethod("stringByDeletingPathExtension")
FoundationString deletePathExtension();
@OccInstanceMethod("pathExtension")
FoundationString pathExtension();
// ===== Padding / length =====
@OccInstanceMethod("stringByPaddingToLength:withString:startingAtIndex:")
FoundationString pad(int newLength, String padString, int startIndex);
@OccInstanceMethod("length")
int length();
// ===== Path normalization =====
@OccInstanceMethod("stringByExpandingTildeInPath")
FoundationString expandTildeInPath();
@OccInstanceMethod("stringByResolvingSymlinksInPath")
FoundationString resolveSymlinksInPath();
@OccInstanceMethod("stringByStandardizingPath")
FoundationString standardizePath();
@OccInstanceMethod("stringsByAppendingPaths:")
List<FoundationString> appendPaths(List<String> paths);
// ===== Percent escaping.
// NOTE(review): these selectors are deprecated in modern Foundation in favor of
// stringByAddingPercentEncodingWithAllowedCharacters: - confirm the target OS baseline.
@OccInstanceMethod("stringByAddingPercentEscapesUsingEncoding:")
FoundationString addPercentEscapes(StringEncoding encoding);
@OccInstanceMethod("stringByReplacingPercentEscapesUsingEncoding:")
FoundationString replacePercentEscapes(StringEncoding encoding);
// ===== Linguistic tagging over a range of the string =====
@OccInstanceMethod("enumerateLinguisticTagsInRange:scheme:options:orthography:usingBlock:")
void enumerateLinguisticTags(Range range, String tagScheme, LinguisticTaggerOptions options,
Orthography orthography, Runnable4<String, Range, Range, Boolean> block);
@OccInstanceMethod("linguisticTagsInRange:scheme:options:orthography:tokenRanges:")
List<LinguisticTag> linguisticTags(Range range, String tagScheme, LinguisticTaggerOptions options,
Orthography orthography, List<Range> tokenRanges);
// ===== UIKit NSString additions (UIStringDrawing header): measuring =====
@Header("UIStringDrawing")
@OccInstanceMethod(value = "sizeWithFont:", tokenGroup = "NSString_UIKit_Additions")
GraphicsSize stringSize(UIFont font);
// NOTE(review): this overload takes LineBreakMode while its siblings take
// UILineBreakMode - confirm whether the type difference is intentional.
@Header("UIStringDrawing")
@OccInstanceMethod(value = "sizeWithFont:forWidth:lineBreakMode:", tokenGroup = "NSString_UIKit_Additions")
GraphicsSize stringSize(UIFont font, float width, LineBreakMode lineBreakMode);
// NOTE(review): in the UIKit selector, actualFontSize is an out-parameter; here it
// is a plain float - confirm how the bridge surfaces the out value.
@Header("UIStringDrawing")
@OccInstanceMethod(value = "sizeWithFont:minFontSize:actualFontSize:forWidth:lineBreakMode:",
tokenGroup = "NSString_UIKit_Additions")
GraphicsSize stringSize(UIFont font, float minFontSize, float actualFontSize, float width,
UILineBreakMode lineBreakMode);
@Header("UIStringDrawing")
@OccInstanceMethod(value = "sizeWithFont:constrainedToSize:", tokenGroup = "NSString_UIKit_Additions")
GraphicsSize stringSize(UIFont font, GraphicsSize size);
@Header("UIStringDrawing")
@OccInstanceMethod(value = "sizeWithFont:constrainedToSize:lineBreakMode:", tokenGroup = "NSString_UIKit_Additions")
GraphicsSize stringSize(UIFont font, GraphicsSize size, UILineBreakMode lineBreakMode);
// ===== UIKit NSString additions: drawing; each returns a GraphicsSize
// (per UIKit convention, presumably the size actually rendered - confirm) =====
@Header("UIStringDrawing")
@OccInstanceMethod(value = "drawAtPoint:withFont:", tokenGroup = "NSString_UIKit_Additions")
GraphicsSize drawString(GraphicsPoint point, UIFont font);
@Header("UIStringDrawing")
@OccInstanceMethod(value = "drawAtPoint:forWidth:withFont:lineBreakMode:", tokenGroup = "NSString_UIKit_Additions")
GraphicsSize drawString(GraphicsPoint point, float width, UIFont font, UILineBreakMode lineBreakMode);
@Header("UIStringDrawing")
@OccInstanceMethod(value = "drawAtPoint:forWidth:withFont:fontSize:lineBreakMode:baselineAdjustment:",
tokenGroup = "NSString_UIKit_Additions")
GraphicsSize drawString(GraphicsPoint point, float width, UIFont font, float fontSize,
UILineBreakMode lineBreakMode, UIBaselineAdjustment baselineAdjustment);
@Header("UIStringDrawing")
@OccInstanceMethod(value = "drawAtPoint:forWidth:withFont:minFontSize:actualFontSize:lineBreakMode" +
":baselineAdjustment:",
tokenGroup = "NSString_UIKit_Additions")
GraphicsSize drawString(GraphicsPoint point, float width, UIFont font, float minFontSize, float actualFontSize,
UILineBreakMode lineBreakMode, UIBaselineAdjustment baselineAdjustment);
@Header("UIStringDrawing")
@OccInstanceMethod(value = "drawInRect:withFont:", tokenGroup = "NSString_UIKit_Additions")
GraphicsSize drawString(GraphicsRect rect, UIFont font);
@Header("UIStringDrawing")
@OccInstanceMethod(value = "drawInRect:withFont:lineBreakMode:", tokenGroup = "NSString_UIKit_Additions")
GraphicsSize drawString(GraphicsRect rect, UIFont font, UILineBreakMode lineBreakMode);
@Header("UIStringDrawing")
@OccInstanceMethod(value = "drawInRect:withFont:lineBreakMode:alignment:", tokenGroup = "NSString_UIKit_Additions")
GraphicsSize drawString(GraphicsRect rect, UIFont font, UILineBreakMode lineBreakMode, UITextAlignment alignment);
// ===== UIGeometry: parse a CG struct from this string's contents
// (each bridges a C function such as CGRectFromString) =====
@Header("UIGeometry")
@CFunction(value = "CGAffineTransformFromString", tokenGroup = "UIKit")
GraphicsAffineTransform toCGAffineTransform();
@Header("UIGeometry")
@CFunction(value = "CGPointFromString", tokenGroup = "UIKit")
GraphicsPoint toCGPoint();
@Header("UIGeometry")
@CFunction(value = "CGRectFromString", tokenGroup = "UIKit")
GraphicsRect toCGRect();
@Header("UIGeometry")
@CFunction(value = "CGSizeFromString", tokenGroup = "UIKit")
GraphicsSize toCGSize();
// Factory for this string type; covariant override of the supertype accessor
// (supertype declaration is outside this view).
@Override
FoundationStringFactory getFactory();
}
| |
package org.yeastrc.xlink.www.qc_data.psm_level_data_merged.main;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import org.slf4j.LoggerFactory; import org.slf4j.Logger;
import org.yeastrc.xlink.searcher_psm_peptide_cutoff_objects.SearcherCutoffValuesRootLevel;
import org.yeastrc.xlink.searcher_psm_peptide_cutoff_objects.SearcherCutoffValuesSearchLevel;
import org.yeastrc.xlink.utils.XLinkUtils;
import org.yeastrc.xlink.www.constants.PeptideViewLinkTypesConstants;
import org.yeastrc.xlink.www.dto.SearchDTO;
import org.yeastrc.xlink.www.exceptions.ProxlWebappDataException;
import org.yeastrc.xlink.www.exceptions.ProxlWebappInternalErrorException;
import org.yeastrc.xlink.www.form_query_json_objects.CutoffValuesRootLevel;
import org.yeastrc.xlink.www.form_query_json_objects.QCPageQueryJSONRoot;
import org.yeastrc.xlink.www.form_query_json_objects.Z_CutoffValuesObjectsToOtherObjectsFactory;
import org.yeastrc.xlink.www.form_query_json_objects.Z_CutoffValuesObjectsToOtherObjectsFactory.Z_CutoffValuesObjectsToOtherObjects_RootResult;
import org.yeastrc.xlink.www.qc_data.a_enums.ForDownload_Enum;
import org.yeastrc.xlink.www.qc_data.psm_level_data_merged.objects.PreMZ_Chart_For_PSMPeptideCutoffs_Merged_Results;
import org.yeastrc.xlink.www.qc_data.psm_level_data_merged.objects.PreMZ_Chart_For_PSMPeptideCutoffs_Merged_Results.PreMZ_Chart_For_PSMPeptideCutoffsResultsForLinkType;
import org.yeastrc.xlink.www.qc_data.psm_level_data_merged.objects.PreMZ_Chart_For_PSMPeptideCutoffs_Merged_Results.PreMZ_Chart_For_PSMPeptideCutoffsResultsForSearchId;
import org.yeastrc.xlink.www.qc_data.utils.BoxPlotUtils;
import org.yeastrc.xlink.www.qc_data.utils.BoxPlotUtils.GetBoxPlotValuesResult;
import org.yeastrc.xlink.www.searcher.PreMZ_For_PSMPeptideCutoffsSearcher;
import org.yeastrc.xlink.www.searcher.PreMZ_For_PSMPeptideCutoffsSearcher.PreMZ_For_PSMPeptideCutoffsResult;
import org.yeastrc.xlink.www.web_utils.GetLinkTypesForSearchers;
/**
 * Builds merged (across-searches) "Pre m/z" (precursor m/z) box plot chart data for
 * PSMs that pass the PSM/Peptide cutoffs.  Results are keyed on link type, then on
 * search id.
 */
public class PreMZ_Chart_For_PSMPeptideCutoffs_Merged {

    private static final Logger log = LoggerFactory.getLogger( PreMZ_Chart_For_PSMPeptideCutoffs_Merged.class);

    /**
     * private constructor
     */
    private PreMZ_Chart_For_PSMPeptideCutoffs_Merged(){}

    /**
     * @return a new instance (no state is held between calls)
     */
    public static PreMZ_Chart_For_PSMPeptideCutoffs_Merged getInstance( ) throws Exception {
        return new PreMZ_Chart_For_PSMPeptideCutoffs_Merged();
    }

    /**
     * Response from call to getPreMZ_Chart_For_PSMPeptideCutoffs_Merged(...)
     */
    public static class PreMZ_Chart_For_PSMPeptideCutoffs_Merged_Method_Response {

        //  Chart-ready results (not populated when only the raw lists were requested, i.e. forDownload == YES)
        private PreMZ_Chart_For_PSMPeptideCutoffs_Merged_Results preMZ_Chart_For_PSMPeptideCutoffs_Merged_Results;

        /**
         * Lists of preMZ mapped by link type, then by search id:
         * Map&lt;[link type], Map&lt;[search id], List&lt;[preMZ]&gt;&gt;&gt;
         */
        private Map<String,Map<Integer,List<BigDecimal>>> allSearchesCombinedPreMZList_Map_KeyedOnSearchId_KeyedOnLinkType;

        public PreMZ_Chart_For_PSMPeptideCutoffs_Merged_Results getPreMZ_Chart_For_PSMPeptideCutoffs_Merged_Results() {
            return preMZ_Chart_For_PSMPeptideCutoffs_Merged_Results;
        }
        public void setPreMZ_Chart_For_PSMPeptideCutoffs_Merged_Results(
                PreMZ_Chart_For_PSMPeptideCutoffs_Merged_Results preMZ_Chart_For_PSMPeptideCutoffs_Merged_Results) {
            this.preMZ_Chart_For_PSMPeptideCutoffs_Merged_Results = preMZ_Chart_For_PSMPeptideCutoffs_Merged_Results;
        }
        /**
         * Lists of preMZ mapped by link type, then by search id:
         * Map&lt;[link type], Map&lt;[search id], List&lt;[preMZ]&gt;&gt;&gt;
         */
        public Map<String, Map<Integer, List<BigDecimal>>> getAllSearchesCombinedPreMZList_Map_KeyedOnSearchId_KeyedOnLinkType() {
            return allSearchesCombinedPreMZList_Map_KeyedOnSearchId_KeyedOnLinkType;
        }
        public void setAllSearchesCombinedPreMZList_Map_KeyedOnSearchId_KeyedOnLinkType(
                Map<String, Map<Integer, List<BigDecimal>>> allSearchesCombinedPreMZList_Map_KeyedOnSearchId_KeyedOnLinkType) {
            this.allSearchesCombinedPreMZList_Map_KeyedOnSearchId_KeyedOnLinkType = allSearchesCombinedPreMZList_Map_KeyedOnSearchId_KeyedOnLinkType;
        }
    }

    /**
     * Build the merged Pre-M/Z chart data for the given searches.
     *
     * @param forDownload when YES, only the raw preMZ lists are populated on the
     *        response; building the chart objects is skipped
     * @param qcPageQueryJSONRoot query root holding link types, mods, protein sequence
     *        version ids and cutoffs
     * @param projectSearchIdsListDeduppedSorted project search ids, dedupped and sorted
     * @param searches the searches to process
     * @param searchesMapOnSearchId searches keyed on search id
     * @return response holding the chart results and the raw preMZ lists
     * @throws ProxlWebappDataException when no or invalid link types are supplied
     * @throws Exception on other internal errors
     */
    public PreMZ_Chart_For_PSMPeptideCutoffs_Merged_Method_Response getPreMZ_Chart_For_PSMPeptideCutoffs_Merged(
            ForDownload_Enum forDownload,
            QCPageQueryJSONRoot qcPageQueryJSONRoot,
            List<Integer> projectSearchIdsListDeduppedSorted,
            List<SearchDTO> searches,
            Map<Integer, SearchDTO> searchesMapOnSearchId ) throws Exception {

        //  Search ids used when converting the cutoff objects below.
        //  ( Dead code removed: previously-computed but never-used locals
        //    'searchIdsListDeduppedSorted' and 'mapProjectSearchIdToSearchId'. )
        Collection<Integer> searchIds = new HashSet<>();
        for ( SearchDTO search : searches ) {
            searchIds.add( search.getSearchId() );
        }

        String[] linkTypesFromURL = qcPageQueryJSONRoot.getLinkTypes();
        if ( linkTypesFromURL == null || linkTypesFromURL.length == 0 ) {
            String msg = "no link types specified";
            log.error( msg );
            throw new ProxlWebappDataException(msg);
        }

        //  Normalize link types: display strings (lower case) for output, and the
        //  web-page selection constants (upper case) for querying, in case the
        //  request came from somewhere other than the web page.
        List<String> linkTypesList = new ArrayList<>( linkTypesFromURL.length );
        {
            String[] linkTypesFromURLUpdated = new String[ linkTypesFromURL.length ];
            int linkTypesFromURLUpdatedIndex = 0;
            for ( String linkTypeFromWeb : linkTypesFromURL ) {
                String linkTypeRequestUpdated = null;
                String linkTypeDisplay = null;
                if ( PeptideViewLinkTypesConstants.CROSSLINK_PSM.equals( linkTypeFromWeb )
                        || XLinkUtils.CROSS_TYPE_STRING.equals( linkTypeFromWeb ) ) {
                    linkTypeRequestUpdated = PeptideViewLinkTypesConstants.CROSSLINK_PSM;
                    linkTypeDisplay = XLinkUtils.CROSS_TYPE_STRING;
                } else if ( PeptideViewLinkTypesConstants.LOOPLINK_PSM.equals( linkTypeFromWeb )
                        || XLinkUtils.LOOP_TYPE_STRING.equals( linkTypeFromWeb ) ) {
                    linkTypeRequestUpdated = PeptideViewLinkTypesConstants.LOOPLINK_PSM;
                    linkTypeDisplay = XLinkUtils.LOOP_TYPE_STRING;
                } else if ( PeptideViewLinkTypesConstants.UNLINKED_PSM.equals( linkTypeFromWeb )
                        || XLinkUtils.UNLINKED_TYPE_STRING.equals( linkTypeFromWeb ) ) {
                    linkTypeRequestUpdated = PeptideViewLinkTypesConstants.UNLINKED_PSM;
                    linkTypeDisplay = XLinkUtils.UNLINKED_TYPE_STRING;
                } else {
                    String msg = "linkType is invalid, linkTypeFromWeb: " + linkTypeFromWeb;
                    log.error( msg );
                    //  Was 'throw new Exception(msg)': use the specific data exception,
                    //  consistent with the other validation failures in this class.
                    throw new ProxlWebappDataException( msg );
                }
                linkTypesList.add( linkTypeDisplay );
                linkTypesFromURLUpdated[ linkTypesFromURLUpdatedIndex ] = linkTypeRequestUpdated;
                linkTypesFromURLUpdatedIndex++;
            }
            linkTypesFromURL = linkTypesFromURLUpdated;
            qcPageQueryJSONRoot.setLinkTypes( linkTypesFromURLUpdated );
        }

        //  Get LinkTypes for DB query - Sets to null when all selected as an optimization
        String[] linkTypesForDBQuery = GetLinkTypesForSearchers.getInstance().getLinkTypesForSearchers( qcPageQueryJSONRoot.getLinkTypes() );
        //  Mods for DB Query
        String[] modsForDBQuery = qcPageQueryJSONRoot.getMods();
        List<Integer> includeProteinSeqVIdsDecodedArray = qcPageQueryJSONRoot.getIncludeProteinSeqVIdsDecodedArray();

        //  Searcher cutoffs for all searches
        CutoffValuesRootLevel cutoffValuesRootLevel = qcPageQueryJSONRoot.getCutoffs();
        Z_CutoffValuesObjectsToOtherObjects_RootResult cutoffValuesObjectsToOtherObjects_RootResult =
                Z_CutoffValuesObjectsToOtherObjectsFactory
                .createSearcherCutoffValuesRootLevel( searchIds, cutoffValuesRootLevel );
        SearcherCutoffValuesRootLevel searcherCutoffValuesRootLevel =
                cutoffValuesObjectsToOtherObjects_RootResult.getSearcherCutoffValuesRootLevel();

        //  ( Removed a second "at least one linkType" check here: unreachable, since the
        //    validation above guarantees a non-empty, same-length link types array. )

        PreMZ_Chart_For_PSMPeptideCutoffs_Merged_Method_Response methodResponse = new PreMZ_Chart_For_PSMPeptideCutoffs_Merged_Method_Response();

        //  Lists of preMZ keyed on link type, then on search id
        Map<String,Map<Integer,List<BigDecimal>>> allSearchesCombinedPreMZList_Map_KeyedOnSearchId_KeyedOnLinkType =
                getAllSearchesCombinedPreMZList_Map_KeyedOnLinkType(
                        searches, linkTypesForDBQuery, modsForDBQuery, includeProteinSeqVIdsDecodedArray, searcherCutoffValuesRootLevel );
        methodResponse.setAllSearchesCombinedPreMZList_Map_KeyedOnSearchId_KeyedOnLinkType(
                allSearchesCombinedPreMZList_Map_KeyedOnSearchId_KeyedOnLinkType );

        if ( forDownload == ForDownload_Enum.YES ) {
            return methodResponse;  //  EARLY RETURN - the raw lists are all the download needs
        }

        PreMZ_Chart_For_PSMPeptideCutoffs_Merged_Results preMZ_Chart_For_PSMPeptideCutoffs_Merged_Results =
                getPerChartData_KeyedOnLinkType(
                        allSearchesCombinedPreMZList_Map_KeyedOnSearchId_KeyedOnLinkType,
                        linkTypesList,
                        searches );
        methodResponse.setPreMZ_Chart_For_PSMPeptideCutoffs_Merged_Results( preMZ_Chart_For_PSMPeptideCutoffs_Merged_Results );
        return methodResponse;
    }

    /**
     * Build per-link-type chart data.
     *
     * @param allSearchesCombinedPreMZList_Map_KeyedOnSearchId_KeyedOnLinkType preMZ lists keyed on link type then search id
     * @param linkTypesList link type display strings, in output order
     * @param searches the searches to process
     * @return chart results, one entry per requested link type (dataFound == false when none)
     * @throws ProxlWebappInternalErrorException on internal error computing box plot values
     */
    private PreMZ_Chart_For_PSMPeptideCutoffs_Merged_Results getPerChartData_KeyedOnLinkType(
            Map<String,Map<Integer,List<BigDecimal>>> allSearchesCombinedPreMZList_Map_KeyedOnSearchId_KeyedOnLinkType,
            List<String> linkTypesList,
            List<SearchDTO> searches ) throws ProxlWebappInternalErrorException {

        List<PreMZ_Chart_For_PSMPeptideCutoffsResultsForLinkType> dataForChartPerLinkTypeList = new ArrayList<>( linkTypesList.size() );
        for ( String linkType : linkTypesList ) {
            Map<Integer,List<BigDecimal>> allSearchesCombinedPreMZList_Map_KeyedOnSearchId =
                    allSearchesCombinedPreMZList_Map_KeyedOnSearchId_KeyedOnLinkType.get( linkType );
            if ( allSearchesCombinedPreMZList_Map_KeyedOnSearchId == null ) {
                //  No data for this link type: emit a "no data" placeholder entry
                PreMZ_Chart_For_PSMPeptideCutoffsResultsForLinkType resultForLinkType = new PreMZ_Chart_For_PSMPeptideCutoffsResultsForLinkType();
                resultForLinkType.setLinkType( linkType );
                resultForLinkType.setDataFound( false );
                dataForChartPerLinkTypeList.add( resultForLinkType );
            } else {
                PreMZ_Chart_For_PSMPeptideCutoffsResultsForLinkType resultForLinkType =
                        getSingleChartData_ForLinkType( allSearchesCombinedPreMZList_Map_KeyedOnSearchId, searches );
                resultForLinkType.setLinkType( linkType );
                dataForChartPerLinkTypeList.add( resultForLinkType );
            }
        }
        PreMZ_Chart_For_PSMPeptideCutoffs_Merged_Results results = new PreMZ_Chart_For_PSMPeptideCutoffs_Merged_Results();
        results.setDataForChartPerLinkTypeList( dataForChartPerLinkTypeList );
        return results;
    }

    /**
     * Build chart data for a single link type: one entry per search.
     *
     * @param allSearchesCombinedPreMZList_Map_KeyedOnSearchId preMZ lists for this link type, keyed on search id
     * @param searches the searches to process
     * @return result for this link type; dataFound is true when at least one search had data
     * @throws ProxlWebappInternalErrorException on internal error computing box plot values
     */
    private PreMZ_Chart_For_PSMPeptideCutoffsResultsForLinkType getSingleChartData_ForLinkType(
            Map<Integer,List<BigDecimal>> allSearchesCombinedPreMZList_Map_KeyedOnSearchId,
            List<SearchDTO> searches ) throws ProxlWebappInternalErrorException {

        boolean dataFound = false;
        //  Output entries keyed on PROJECT search id (input lists are keyed on search id)
        Map<Integer, PreMZ_Chart_For_PSMPeptideCutoffsResultsForSearchId> dataForChartPerSearchIdMap_KeyProjectSearchId = new HashMap<>();
        for ( SearchDTO search : searches ) {
            List<BigDecimal> preMZList = allSearchesCombinedPreMZList_Map_KeyedOnSearchId.get( search.getSearchId() );
            if ( preMZList == null ) {
                //  No data for this search: emit a "no data" placeholder entry
                PreMZ_Chart_For_PSMPeptideCutoffsResultsForSearchId resultForSearchId = new PreMZ_Chart_For_PSMPeptideCutoffsResultsForSearchId();
                resultForSearchId.setSearchId( search.getSearchId() );
                resultForSearchId.setDataFound( false );
                dataForChartPerSearchIdMap_KeyProjectSearchId.put( search.getProjectSearchId(), resultForSearchId );
            } else {
                dataFound = true;
                PreMZ_Chart_For_PSMPeptideCutoffsResultsForSearchId resultForSearchId =
                        getSingleChartData_ForSearchId( preMZList );
                resultForSearchId.setSearchId( search.getSearchId() );
                resultForSearchId.setDataFound( true );
                dataForChartPerSearchIdMap_KeyProjectSearchId.put( search.getProjectSearchId(), resultForSearchId );
            }
        }
        PreMZ_Chart_For_PSMPeptideCutoffsResultsForLinkType result = new PreMZ_Chart_For_PSMPeptideCutoffsResultsForLinkType();
        result.setDataForChartPerSearchIdMap_KeyProjectSearchId( dataForChartPerSearchIdMap_KeyProjectSearchId );
        result.setDataFound( dataFound );
        return result;
    }

    /**
     * Build box plot chart data for a single search's preMZ values.
     *
     * @param preMZList the preMZ values for one search / link type
     * @return box plot quartiles/median, chart interval bounds, and outliers
     * @throws ProxlWebappInternalErrorException on internal error computing box plot values
     */
    private PreMZ_Chart_For_PSMPeptideCutoffsResultsForSearchId getSingleChartData_ForSearchId( List<BigDecimal> preMZList ) throws ProxlWebappInternalErrorException {

        List<Double> values = new ArrayList<>( preMZList.size() );
        for ( BigDecimal value : preMZList ) {
            values.add( value.doubleValue() );
        }
        GetBoxPlotValuesResult getBoxPlotValuesResult =
                BoxPlotUtils.getInstance().getBoxPlotValues( values );
        double chartIntervalMax = getBoxPlotValuesResult.getChartIntervalMax();
        double chartIntervalMin = getBoxPlotValuesResult.getChartIntervalMin();
        //  preMZ values outside [chartIntervalMin, chartIntervalMax] are sent to the
        //  web app as outliers
        List<Double> preMZ_outliers = new ArrayList<>( preMZList.size() );
        for ( BigDecimal preMZ : preMZList ) {
            double preMZDouble = preMZ.doubleValue();
            if ( preMZDouble < chartIntervalMin || preMZDouble > chartIntervalMax ) {
                preMZ_outliers.add( preMZDouble );
            }
        }
        PreMZ_Chart_For_PSMPeptideCutoffsResultsForSearchId result = new PreMZ_Chart_For_PSMPeptideCutoffsResultsForSearchId();
        result.setChartIntervalMax( chartIntervalMax );
        result.setChartIntervalMin( chartIntervalMin );
        result.setFirstQuartile( getBoxPlotValuesResult.getFirstQuartile() );
        result.setThirdQuartile( getBoxPlotValuesResult.getThirdQuartile() );
        result.setMedian( getBoxPlotValuesResult.getMedian() );
        result.setPreMZ_outliers( preMZ_outliers );
        result.setDataFound(true);
        return result;
    }

    /**
     * Query the preMZ values for each search and combine into
     * Map&lt;[link type], Map&lt;[search id], List&lt;[preMZ]&gt;&gt;&gt;.
     *
     * @param searches the searches to process
     * @param linkTypesForDBQuery link types for the DB query (null means all)
     * @param modsForDBQuery mods for the DB query
     * @param includeProteinSeqVIdsDecodedArray protein sequence version ids to include
     * @param searcherCutoffValuesRootLevel cutoffs per project search id
     * @throws ProxlWebappDataException when cutoffs are missing for a project search id
     * @throws Exception on searcher errors
     */
    private Map<String,Map<Integer,List<BigDecimal>>> getAllSearchesCombinedPreMZList_Map_KeyedOnLinkType(
            List<SearchDTO> searches,
            String[] linkTypesForDBQuery,
            String[] modsForDBQuery,
            List<Integer> includeProteinSeqVIdsDecodedArray,
            SearcherCutoffValuesRootLevel searcherCutoffValuesRootLevel) throws ProxlWebappDataException, Exception {

        Map<String,Map<Integer,List<BigDecimal>>> allSearchesCombinedPreMZList_Map_KeyedOnSearchId_KeyedOnLinkType = new HashMap<>();
        for ( SearchDTO searchDTO : searches ) {
            Integer projectSearchId = searchDTO.getProjectSearchId();
            Integer searchId = searchDTO.getSearchId();
            //  Get cutoffs for this project search id
            SearcherCutoffValuesSearchLevel searcherCutoffValuesSearchLevel =
                    searcherCutoffValuesRootLevel.getPerSearchCutoffs( projectSearchId );
            if ( searcherCutoffValuesSearchLevel == null ) {
                String msg = "searcherCutoffValuesRootLevel.getPerSearchCutoffs(projectSearchId) returned null for: " + projectSearchId;
                log.error( msg );
                throw new ProxlWebappDataException( msg );
            }
            PreMZ_For_PSMPeptideCutoffsResult preMZ_For_PSMPeptideCutoffsResult =
                    PreMZ_For_PSMPeptideCutoffsSearcher.getInstance()
                    .getPreMZ_For_PSMPeptideCutoffs( searchId, searcherCutoffValuesSearchLevel, linkTypesForDBQuery, modsForDBQuery, includeProteinSeqVIdsDecodedArray );
            //  Map<[link type], List<[preMZ]>> for this one search
            Map<String,List<BigDecimal>> preMZList_Map_KeyedOnLinkType =
                    preMZ_For_PSMPeptideCutoffsResult.getResultsPreMZList_Map_KeyedOnLinkType();
            //  Link Type includes 'dimer' which has to be combined with 'unlinked'
            combineDimerListIntoUnlinkedList( preMZList_Map_KeyedOnLinkType );
            for ( Map.Entry<String,List<BigDecimal>> preMZList_Map_Entry : preMZList_Map_KeyedOnLinkType.entrySet() ) {
                //  computeIfAbsent replaces the previous manual get-then-put-if-null idiom
                allSearchesCombinedPreMZList_Map_KeyedOnSearchId_KeyedOnLinkType
                        .computeIfAbsent( preMZList_Map_Entry.getKey(), linkType -> new HashMap<>() )
                        .put( searchId, preMZList_Map_Entry.getValue() );
            }
        }
        return allSearchesCombinedPreMZList_Map_KeyedOnSearchId_KeyedOnLinkType;
    }

    /**
     * Combine the 'dimer' preMZ list into the 'unlinked' list, removing the 'dimer' entry.
     *
     * @param preMZList_Map_KeyedOnLinkType preMZ lists keyed on link type; modified in place
     */
    private void combineDimerListIntoUnlinkedList( Map<String,List<BigDecimal>> preMZList_Map_KeyedOnLinkType ) {

        List<BigDecimal> dimerValuesList = preMZList_Map_KeyedOnLinkType.get( XLinkUtils.DIMER_TYPE_STRING );
        if ( dimerValuesList == null ) {
            //  No Dimer values so skip
            return; // EARLY EXIT
        }
        List<BigDecimal> unlinkedValuesList = preMZList_Map_KeyedOnLinkType.get( XLinkUtils.UNLINKED_TYPE_STRING );
        if ( unlinkedValuesList == null ) {
            //  No Unlinked values so simply move the dimer list to the unlinked key
            preMZList_Map_KeyedOnLinkType.put( XLinkUtils.UNLINKED_TYPE_STRING, dimerValuesList );
            preMZList_Map_KeyedOnLinkType.remove( XLinkUtils.DIMER_TYPE_STRING );
            return; // EARLY EXIT
        }
        unlinkedValuesList.addAll( dimerValuesList );
        preMZList_Map_KeyedOnLinkType.remove( XLinkUtils.DIMER_TYPE_STRING );
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.translog;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.NativeFSLockFactory;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.MockEngineFactoryPlugin;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.monitor.fs.FsInfo;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.test.engine.MockEngineSupport;
import org.elasticsearch.test.transport.MockTransportService;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.notNullValue;
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, numDataNodes = 0)
public class TruncateTranslogIT extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
    // Plugins this suite needs: the mock transport service, plus the mock engine
    // factory (which backs MockEngineSupport.DISABLE_FLUSH_ON_CLOSE used in the tests).
    final Collection<Class<? extends Plugin>> suitePlugins =
            Arrays.asList(MockTransportService.TestPlugin.class, MockEngineFactoryPlugin.class);
    return suitePlugins;
}
/**
 * Exercises {@code TruncateTranslogCommand} against a primary shard whose translog has been
 * corrupted on disk. Flow: index docs and flush (the "kept" docs), index more docs that stay
 * only in the translog (to be truncated), verify the tool refuses to run while the shard
 * still holds its lock, stop the replica, corrupt the translog, restart, confirm searches
 * fail, close the index, truncate the translog, re-open, and verify only the flushed docs
 * survive. Finally the replica node is started again and must recover from the truncated
 * primary via a file-based recovery (recoveredFileCount > 0).
 */
public void testCorruptTranslogTruncation() throws Exception {
internalCluster().startNodes(2, Settings.EMPTY);
final String replicaNode = internalCluster().getNodeNames()[1];
// Keep the shard off replicaNode initially so primary/replica roles are deterministic.
assertAcked(prepareCreate("test").setSettings(Settings.builder()
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 1)
.put("index.refresh_interval", "-1")
.put(MockEngineSupport.DISABLE_FLUSH_ON_CLOSE.getKey(), true) // never flush - always recover from translog
.put("index.routing.allocation.exclude._name", replicaNode)
));
ensureYellow();
// Drop the allocation filter so the replica can now be assigned to replicaNode.
assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder()
.put("index.routing.allocation.exclude._name", (String)null)
));
ensureGreen();
// Index some documents
int numDocsToKeep = randomIntBetween(0, 100);
logger.info("--> indexing [{}] docs to be kept", numDocsToKeep);
IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocsToKeep];
for (int i = 0; i < builders.length; i++) {
builders[i] = client().prepareIndex("test", "type").setSource("foo", "bar");
}
indexRandom(false, false, false, Arrays.asList(builders));
// Flush so the "kept" docs are safe in Lucene, then stop size-based flushing so the
// next batch of docs lives only in the translog.
flush("test");
disableTranslogFlush("test");
// having no extra docs is an interesting case for seq no based recoveries - test it more often
int numDocsToTruncate = randomBoolean() ? 0 : randomIntBetween(0, 100);
logger.info("--> indexing [{}] more doc to be truncated", numDocsToTruncate);
builders = new IndexRequestBuilder[numDocsToTruncate];
for (int i = 0; i < builders.length; i++) {
builders[i] = client().prepareIndex("test", "type").setSource("foo", "bar");
}
indexRandom(false, false, false, Arrays.asList(builders));
Set<Path> translogDirs = getTranslogDirs("test");
TruncateTranslogCommand ttc = new TruncateTranslogCommand();
MockTerminal t = new MockTerminal();
OptionParser parser = ttc.getParser();
for (Path translogDir : translogDirs) {
OptionSet options = parser.parse("-d", translogDir.toAbsolutePath().toString(), "-b");
// Try running it before the shard is closed, it should flip out because it can't acquire the lock
try {
logger.info("--> running truncate while index is open on [{}]", translogDir.toAbsolutePath());
ttc.execute(t, options, null /* TODO: env should be real here, and ttc should actually use it... */);
fail("expected the truncate command to fail not being able to acquire the lock");
} catch (Exception e) {
assertThat(e.getMessage(), containsString("Failed to lock shard's directory"));
}
}
if (randomBoolean() && numDocsToTruncate > 0) {
// flush the replica, so it will have more docs than what the primary will have
Index index = resolveIndex("test");
IndexShard replica = internalCluster().getInstance(IndicesService.class, replicaNode).getShardOrNull(new ShardId(index, 0));
replica.flush(new FlushRequest());
logger.info("--> performed extra flushing on replica");
}
// shut down the replica node to be tested later
internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNode));
// Corrupt the translog file(s)
logger.info("--> corrupting translog");
corruptRandomTranslogFiles("test");
// Restart the single node
logger.info("--> restarting node");
internalCluster().restartRandomDataNode();
// Bounded health wait: the shard is expected to stay failed, so don't block forever.
client().admin().cluster().prepareHealth().setWaitForYellowStatus()
.setTimeout(new TimeValue(1000, TimeUnit.MILLISECONDS))
.setWaitForEvents(Priority.LANGUID)
.get();
try {
client().prepareSearch("test").setQuery(matchAllQuery()).get();
fail("all shards should be failed due to a corrupted translog");
} catch (SearchPhaseExecutionException e) {
// Good, all shards should be failed because there is only a
// single shard and its translog is corrupt
}
// Close the index so we can actually truncate the translog
logger.info("--> closing 'test' index");
client().admin().indices().prepareClose("test").get();
for (Path translogDir : translogDirs) {
final Path idxLocation = translogDir.getParent().resolve("index");
// Wait until the now-closed shard has released its Lucene write lock before truncating.
assertBusy(() -> {
logger.info("--> checking that lock has been released for {}", idxLocation);
try (Directory dir = FSDirectory.open(idxLocation, NativeFSLockFactory.INSTANCE);
Lock writeLock = dir.obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
// Great, do nothing, we just wanted to obtain the lock
} catch (LockObtainFailedException lofe) {
logger.info("--> failed acquiring lock for {}", idxLocation);
fail("still waiting for lock release at [" + idxLocation + "]");
} catch (IOException ioe) {
fail("Got an IOException: " + ioe);
}
});
OptionSet options = parser.parse("-d", translogDir.toAbsolutePath().toString(), "-b");
logger.info("--> running truncate translog command for [{}]", translogDir.toAbsolutePath());
ttc.execute(t, options, null /* TODO: env should be real here, and ttc should actually use it... */);
logger.info("--> output:\n{}", t.getOutput());
}
// Re-open index
logger.info("--> opening 'test' index");
client().admin().indices().prepareOpen("test").get();
ensureYellow("test");
// Run a search and make sure it succeeds
// Only the flushed docs survive; the truncated translog docs are gone by design.
assertHitCount(client().prepareSearch("test").setQuery(matchAllQuery()).get(), numDocsToKeep);
logger.info("--> starting the replica node to test recovery");
internalCluster().startNode();
ensureGreen("test");
assertHitCount(client().prepareSearch("test").setPreference("_replica").setQuery(matchAllQuery()).get(), numDocsToKeep);
final RecoveryResponse recoveryResponse = client().admin().indices().prepareRecoveries("test").setActiveOnly(false).get();
final RecoveryState replicaRecoveryState = recoveryResponse.shardRecoveryStates().get("test").stream()
.filter(recoveryState -> recoveryState.getPrimary() == false).findFirst().get();
// A file-based recovery is expected, so at least one file must have been copied.
assertThat(replicaRecoveryState.getIndex().toString(), replicaRecoveryState.getIndex().recoveredFileCount(), greaterThan(0));
}
/**
 * Exercises {@code TruncateTranslogCommand} on a corrupted *replica* translog while the
 * primary stays intact: both nodes are stopped (preserving start order and shard roles),
 * the sampled replica translog files are corrupted and truncated, and after restart the
 * replica must recover from the primary via a file-based recovery with no document loss.
 */
public void testCorruptTranslogTruncationOfReplica() throws Exception {
internalCluster().startNodes(2, Settings.EMPTY);
final String primaryNode = internalCluster().getNodeNames()[0];
final String replicaNode = internalCluster().getNodeNames()[1];
// Keep the shard off replicaNode initially so primary/replica roles are deterministic.
assertAcked(prepareCreate("test").setSettings(Settings.builder()
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 1)
.put("index.refresh_interval", "-1")
.put(MockEngineSupport.DISABLE_FLUSH_ON_CLOSE.getKey(), true) // never flush - always recover from translog
.put("index.routing.allocation.exclude._name", replicaNode)
));
ensureYellow();
// Drop the allocation filter so the replica can now be assigned to replicaNode.
assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder()
.put("index.routing.allocation.exclude._name", (String)null)
));
ensureGreen();
// Index some documents
int numDocsToKeep = randomIntBetween(0, 100);
logger.info("--> indexing [{}] docs to be kept", numDocsToKeep);
IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocsToKeep];
for (int i = 0; i < builders.length; i++) {
builders[i] = client().prepareIndex("test", "type").setSource("foo", "bar");
}
indexRandom(false, false, false, Arrays.asList(builders));
flush("test");
disableTranslogFlush("test");
// having no extra docs is an interesting case for seq no based recoveries - test it more often
int numDocsToTruncate = randomBoolean() ? 0 : randomIntBetween(0, 100);
logger.info("--> indexing [{}] more docs to be truncated", numDocsToTruncate);
builders = new IndexRequestBuilder[numDocsToTruncate];
for (int i = 0; i < builders.length; i++) {
builders[i] = client().prepareIndex("test", "type").setSource("foo", "bar");
}
indexRandom(false, false, false, Arrays.asList(builders));
final int totalDocs = numDocsToKeep + numDocsToTruncate;
// sample the replica node translog dirs
final ShardId shardId = new ShardId(resolveIndex("test"), 0);
Set<Path> translogDirs = getTranslogDirs(replicaNode, shardId);
// stop the cluster nodes. we don't use full restart so the node start up order will be the same
// and shard roles will be maintained
internalCluster().stopRandomDataNode();
internalCluster().stopRandomDataNode();
// Corrupt the translog file(s)
logger.info("--> corrupting translog");
corruptTranslogFiles(translogDirs);
// Restart the single node
logger.info("--> starting node");
internalCluster().startNode();
ensureYellow();
// Run a search and make sure it succeeds
// The primary's translog is intact, so every indexed doc must be visible.
assertHitCount(client().prepareSearch("test").setQuery(matchAllQuery()).get(), totalDocs);
TruncateTranslogCommand ttc = new TruncateTranslogCommand();
MockTerminal t = new MockTerminal();
OptionParser parser = ttc.getParser();
for (Path translogDir : translogDirs) {
final Path idxLocation = translogDir.getParent().resolve("index");
// Wait until the stopped node's shard has released its Lucene write lock.
assertBusy(() -> {
logger.info("--> checking that lock has been released for {}", idxLocation);
try (Directory dir = FSDirectory.open(idxLocation, NativeFSLockFactory.INSTANCE);
Lock writeLock = dir.obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
// Great, do nothing, we just wanted to obtain the lock
} catch (LockObtainFailedException lofe) {
logger.info("--> failed acquiring lock for {}", idxLocation);
fail("still waiting for lock release at [" + idxLocation + "]");
} catch (IOException ioe) {
fail("Got an IOException: " + ioe);
}
});
OptionSet options = parser.parse("-d", translogDir.toAbsolutePath().toString(), "-b");
logger.info("--> running truncate translog command for [{}]", translogDir.toAbsolutePath());
ttc.execute(t, options, null /* TODO: env should be real here, and ttc should actually use it... */);
logger.info("--> output:\n{}", t.getOutput());
}
logger.info("--> starting the replica node to test recovery");
internalCluster().startNode();
ensureGreen("test");
assertHitCount(client().prepareSearch("test").setPreference("_replica").setQuery(matchAllQuery()).get(), totalDocs);
final RecoveryResponse recoveryResponse = client().admin().indices().prepareRecoveries("test").setActiveOnly(false).get();
final RecoveryState replicaRecoveryState = recoveryResponse.shardRecoveryStates().get("test").stream()
.filter(recoveryState -> recoveryState.getPrimary() == false).findFirst().get();
// the replica translog was disabled so it doesn't know what the global checkpoint is and thus can't do ops based recovery
assertThat(replicaRecoveryState.getIndex().toString(), replicaRecoveryState.getIndex().recoveredFileCount(), greaterThan(0));
}
/**
 * Picks a random active primary shard of the given index and returns
 * that shard's translog directories on its current node.
 *
 * @param indexName the index whose primary translog dirs to resolve
 * @return the set of translog directories of the chosen primary shard
 * @throws IOException if resolving the directories fails
 */
private Set<Path> getTranslogDirs(String indexName) throws IOException {
    ClusterState clusterState = client().admin().cluster().prepareState().get().getState();
    GroupShardsIterator grouped =
        clusterState.getRoutingTable().activePrimaryShardsGrouped(new String[]{indexName}, false);
    List<ShardIterator> shardIterators = iterableAsArrayList(grouped);
    ShardIterator chosen = RandomPicks.randomFrom(random(), shardIterators);
    ShardRouting routing = chosen.nextOrNull();
    // The chosen shard must be an assigned primary.
    assertNotNull(routing);
    assertTrue(routing.primary());
    assertTrue(routing.assignedToNode());
    return getTranslogDirs(routing.currentNodeId(), routing.shardId());
}
/**
 * Resolves the on-disk translog directories for the given shard on the
 * given node by walking the node's filesystem data paths.
 *
 * @param nodeId  the node hosting the shard
 * @param shardId the shard whose translog dirs to locate
 * @return every existing translog directory for the shard, in deterministic order
 */
private Set<Path> getTranslogDirs(String nodeId, ShardId shardId) {
    NodesStatsResponse stats = client().admin().cluster().prepareNodesStats(nodeId).setFs(true).get();
    // TreeSet gives a deterministic iteration order across runs.
    Set<Path> result = new TreeSet<>();
    final String relativePath =
        "indices/" + shardId.getIndex().getUUID() + "/" + shardId.getId() + "/translog";
    for (FsInfo.Path dataPath : stats.getNodes().get(0).getFs()) {
        Path candidate = PathUtils.get(dataPath.getPath()).resolve(relativePath);
        if (Files.isDirectory(candidate)) {
            result.add(candidate);
        }
    }
    return result;
}
/**
 * Corrupts the translog files of a randomly chosen active primary
 * shard of the given index.
 *
 * @param indexName the index whose translog to corrupt
 * @throws IOException if locating or corrupting the files fails
 */
private void corruptRandomTranslogFiles(String indexName) throws IOException {
    corruptTranslogFiles(getTranslogDirs(indexName));
}
/**
 * Flips a single byte, 5-20 times, in randomly chosen {@code translog-*} files under the
 * given translog directories. Fails the test if no translog file was found to corrupt.
 * NOTE(review): the {@code raf.size() - 1} bound assumes every translog file is non-empty —
 * an empty file would make the upper bound negative. Presumably translog files always carry
 * at least a header; TODO confirm.
 */
private void corruptTranslogFiles(Set<Path> translogDirs) throws IOException {
Set<Path> files = new TreeSet<>(); // treeset makes sure iteration order is deterministic
for (Path translogDir : translogDirs) {
if (Files.isDirectory(translogDir)) {
logger.info("--> path: {}", translogDir);
try (DirectoryStream<Path> stream = Files.newDirectoryStream(translogDir)) {
for (Path item : stream) {
logger.info("--> File: {}", item);
// Only corrupt translog generation files, not other files in the directory.
if (Files.isRegularFile(item) && item.getFileName().toString().startsWith("translog-")) {
files.add(item);
}
}
}
}
}
Path fileToCorrupt = null;
if (!files.isEmpty()) {
int corruptions = randomIntBetween(5, 20);
for (int i = 0; i < corruptions; i++) {
// The same file may be picked (and re-corrupted) more than once.
fileToCorrupt = RandomPicks.randomFrom(random(), files);
try (FileChannel raf = FileChannel.open(fileToCorrupt, StandardOpenOption.READ, StandardOpenOption.WRITE)) {
// read
raf.position(randomIntBetween(0, (int) Math.min(Integer.MAX_VALUE, raf.size() - 1)));
long filePointer = raf.position();
ByteBuffer bb = ByteBuffer.wrap(new byte[1]);
raf.read(bb);
bb.flip();
// corrupt
byte oldValue = bb.get(0);
byte newValue = (byte) (oldValue + 1);
bb.put(0, newValue);
// rewrite
raf.position(filePointer);
raf.write(bb);
logger.info("--> corrupting file {} -- flipping at position {} from {} to {} file: {}",
fileToCorrupt, filePointer, Integer.toHexString(oldValue),
Integer.toHexString(newValue), fileToCorrupt);
}
}
}
assertThat("no file corrupted", fileToCorrupt, notNullValue());
}
/**
 * Disables size-based translog flushing for the specified index by raising
 * the flush threshold to 1 PB, so it effectively never triggers.
 *
 * @param index the index to update
 */
private static void disableTranslogFlush(String index) {
    Settings noFlush = Settings.builder()
        .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(),
            new ByteSizeValue(1, ByteSizeUnit.PB))
        .build();
    client().admin().indices().prepareUpdateSettings(index).setSettings(noFlush).get();
}
}
| |
// Copyright (c) 2006 Damien Miller <djm@mindrot.org>
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
package nz.co.lolnet.lolnetachievements.Utility;
import java.io.UnsupportedEncodingException;
import java.security.SecureRandom;
/**
* BCrypt implements OpenBSD-style Blowfish password hashing using
* the scheme described in "A Future-Adaptable Password Scheme" by
* Niels Provos and David Mazieres.
* <p>
* This password hashing system tries to thwart off-line password
* cracking using a computationally-intensive hashing algorithm,
* based on Bruce Schneier's Blowfish cipher. The work factor of
* the algorithm is parameterised, so it can be increased as
* computers get faster.
* <p>
* Usage is really simple. To hash a password for the first time,
* call the hashpw method with a random salt, like this:
* <p>
* <code>
* String pw_hash = BCrypt.hashpw(plain_password, BCrypt.gensalt()); <br />
* </code>
* <p>
* To check whether a plaintext password matches one that has been
* hashed previously, use the checkpw method:
* <p>
* <code>
* if (BCrypt.checkpw(candidate_password, stored_hash))<br />
* System.out.println("It matches");<br />
* else<br />
* System.out.println("It does not match");<br />
* </code>
* <p>
* The gensalt() method takes an optional parameter (log_rounds)
* that determines the computational complexity of the hashing:
* <p>
* <code>
* String strong_salt = BCrypt.gensalt(10)<br />
* String stronger_salt = BCrypt.gensalt(12)<br />
* </code>
* <p>
* The amount of work increases exponentially (2**log_rounds), so
* each increment is twice as much work. The default log_rounds is
* 10, and the valid range is 4 to 30.
*
* @author Damien Miller
* @version 0.2
*/
public class BCrypt {
// BCrypt parameters
private static final int GENSALT_DEFAULT_LOG2_ROUNDS = 10; // default cost factor: 2^10 hashing rounds
private static final int BCRYPT_SALT_LEN = 16; // raw (decoded) salt length in bytes
// Blowfish parameters
private static final int BLOWFISH_NUM_ROUNDS = 16; // Feistel rounds per encrypted block
// Initial contents of key schedule
// P-array: BLOWFISH_NUM_ROUNDS + 2 = 18 subkeys, cloned into P by init_key().
private static final int P_orig[] = {
0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344,
0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89,
0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c,
0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917,
0x9216d5d9, 0x8979fb1b
};
private static final int S_orig[] = {
0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7,
0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99,
0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16,
0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e,
0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee,
0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013,
0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef,
0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e,
0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60,
0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440,
0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce,
0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a,
0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e,
0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677,
0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193,
0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032,
0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88,
0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239,
0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e,
0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0,
0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3,
0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98,
0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88,
0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe,
0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6,
0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d,
0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b,
0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7,
0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba,
0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463,
0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f,
0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09,
0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3,
0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb,
0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279,
0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8,
0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab,
0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82,
0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db,
0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573,
0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0,
0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b,
0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790,
0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8,
0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4,
0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0,
0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7,
0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c,
0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad,
0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1,
0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299,
0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9,
0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477,
0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf,
0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49,
0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af,
0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa,
0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5,
0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41,
0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915,
0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400,
0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915,
0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664,
0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a,
0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623,
0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266,
0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1,
0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e,
0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6,
0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1,
0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e,
0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1,
0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737,
0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8,
0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff,
0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd,
0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701,
0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7,
0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41,
0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331,
0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf,
0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af,
0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e,
0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87,
0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c,
0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2,
0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16,
0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd,
0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b,
0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509,
0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e,
0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3,
0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f,
0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a,
0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4,
0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960,
0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66,
0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28,
0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802,
0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84,
0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510,
0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf,
0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14,
0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e,
0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50,
0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7,
0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8,
0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281,
0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99,
0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696,
0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128,
0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73,
0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0,
0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0,
0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105,
0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250,
0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3,
0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285,
0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00,
0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061,
0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb,
0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e,
0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735,
0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc,
0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9,
0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340,
0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20,
0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7,
0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934,
0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068,
0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af,
0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840,
0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45,
0xbfbc09ec, 0x03bd9785, 0x7fac6dd0, 0x31cb8504,
0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a,
0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb,
0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee,
0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6,
0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42,
0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b,
0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2,
0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb,
0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527,
0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b,
0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33,
0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c,
0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3,
0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc,
0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17,
0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564,
0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b,
0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115,
0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922,
0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728,
0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0,
0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e,
0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37,
0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d,
0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804,
0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b,
0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3,
0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb,
0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d,
0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c,
0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350,
0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9,
0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a,
0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe,
0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d,
0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc,
0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f,
0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61,
0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2,
0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9,
0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2,
0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c,
0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e,
0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633,
0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10,
0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169,
0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52,
0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027,
0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5,
0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62,
0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634,
0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76,
0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24,
0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc,
0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4,
0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c,
0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837,
0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0,
0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b,
0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe,
0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b,
0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4,
0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8,
0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6,
0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304,
0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22,
0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4,
0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6,
0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9,
0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59,
0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593,
0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51,
0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28,
0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c,
0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b,
0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28,
0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c,
0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd,
0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a,
0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319,
0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb,
0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f,
0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991,
0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32,
0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680,
0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166,
0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae,
0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb,
0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5,
0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47,
0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370,
0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d,
0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84,
0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048,
0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8,
0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd,
0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9,
0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7,
0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38,
0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f,
0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c,
0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525,
0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1,
0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442,
0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964,
0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e,
0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8,
0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d,
0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f,
0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299,
0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02,
0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc,
0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614,
0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a,
0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6,
0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b,
0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0,
0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060,
0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e,
0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9,
0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f,
0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6
};
// bcrypt IV: "OrpheanBeholderScryDoubt". The C implementation calls
// this "ciphertext", but it is really plaintext or an IV. We keep
// the name to make code comparison easier.
// Six 32-bit words = the 24-byte bcrypt magic string.
static private final int bf_crypt_ciphertext[] = {
0x4f727068, 0x65616e42, 0x65686f6c,
0x64657253, 0x63727944, 0x6f756274
};
// Table for Base64 encoding
// bcrypt's 64-character alphabet starts "./" followed by A-Z, a-z, 0-9 —
// a different ordering than standard MIME base64.
static private final char base64_code[] = {
'.', '/', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J',
'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V',
'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',
'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
'u', 'v', 'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5',
'6', '7', '8', '9'
};
// Table for Base64 decoding
// Maps a character code (0..127, 128 entries) back to its 6-bit value,
// or -1 for characters outside the bcrypt base64 alphabet.
static private final byte index_64[] = {
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, 0, 1, 54, 55,
56, 57, 58, 59, 60, 61, 62, 63, -1, -1,
-1, -1, -1, -1, -1, 2, 3, 4, 5, 6,
7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
-1, -1, -1, -1, -1, -1, 28, 29, 30,
31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
41, 42, 43, 44, 45, 46, 47, 48, 49, 50,
51, 52, 53, -1, -1, -1, -1, -1
};
// Expanded Blowfish key
// Per-hash working copies of the key schedule; populated by init_key()
// and then mixed by key()/ekskey().
private int P[];
private int S[];
/**
 * Encode a byte array using bcrypt's slightly-modified base64
 * encoding scheme. Note that this is *not* compatible with
 * the standard MIME-base64 encoding. Output is unpadded: a trailing
 * group of 1 or 2 input bytes yields 2 or 3 output characters.
 *
 * @param d the byte array to encode
 * @param len the number of bytes to encode
 * @return base64-encoded string
 * @exception IllegalArgumentException if the length is invalid
 */
private static String encode_base64(byte d[], int len)
    throws IllegalArgumentException {
    if (len <= 0 || len > d.length)
        throw new IllegalArgumentException ("Invalid len");
    StringBuilder encoded = new StringBuilder();
    int pos = 0;
    while (pos < len) {
        // First output char: top 6 bits of byte 0.
        int b0 = d[pos++] & 0xff;
        encoded.append(base64_code[(b0 >> 2) & 0x3f]);
        int acc = (b0 & 0x03) << 4;
        if (pos >= len) {
            encoded.append(base64_code[acc & 0x3f]);
            break;
        }
        // Second output char: low 2 bits of byte 0 + top 4 bits of byte 1.
        int b1 = d[pos++] & 0xff;
        acc |= (b1 >> 4) & 0x0f;
        encoded.append(base64_code[acc & 0x3f]);
        acc = (b1 & 0x0f) << 2;
        if (pos >= len) {
            encoded.append(base64_code[acc & 0x3f]);
            break;
        }
        // Third and fourth output chars: rest of byte 1 + all of byte 2.
        int b2 = d[pos++] & 0xff;
        acc |= (b2 >> 6) & 0x03;
        encoded.append(base64_code[acc & 0x3f]);
        encoded.append(base64_code[b2 & 0x3f]);
    }
    return encoded.toString();
}
/**
 * Look up the 6 bits base64-encoded by the specified character,
 * range-checking against the conversion table.
 *
 * @param x the base64-encoded value
 * @return the decoded value of x, or -1 if x is not a valid
 * bcrypt-base64 character
 */
private static byte char64(char x) {
    // Bounds check must use >= : index_64.length itself is not a valid
    // index. The previous "> index_64.length" comparison let
    // x == index_64.length (i.e. '\u0080') fall through and throw
    // ArrayIndexOutOfBoundsException instead of returning -1.
    if ((int)x < 0 || (int)x >= index_64.length)
        return -1;
    return index_64[(int)x];
}
/**
 * Decode a string encoded using bcrypt's base64 scheme to a
 * byte array. Note that this is *not* compatible with
 * the standard MIME-base64 encoding. Decoding stops early at the
 * first character outside the bcrypt alphabet, at the end of the
 * input, or once maxolen bytes have been produced.
 * @param s the string to decode
 * @param maxolen the maximum number of bytes to decode
 * @return an array containing the decoded bytes
 * @throws IllegalArgumentException if maxolen is invalid
 */
private static byte[] decode_base64(String s, int maxolen)
throws IllegalArgumentException {
// Decoded bytes are staged in a StringBuffer (one byte per char) and
// copied into a right-sized byte array at the end.
StringBuffer rs = new StringBuffer();
int off = 0, slen = s.length(), olen = 0;
byte ret[];
byte c1, c2, c3, c4, o;
if (maxolen <= 0)
throw new IllegalArgumentException ("Invalid maxolen");
// Each iteration consumes up to 4 input chars and emits up to 3 bytes.
while (off < slen - 1 && olen < maxolen) {
c1 = char64(s.charAt(off++));
c2 = char64(s.charAt(off++));
if (c1 == -1 || c2 == -1)
break;
// Byte 0: 6 bits of c1 + top 2 bits of c2.
o = (byte)(c1 << 2);
o |= (c2 & 0x30) >> 4;
rs.append((char)o);
if (++olen >= maxolen || off >= slen)
break;
c3 = char64(s.charAt(off++));
if (c3 == -1)
break;
// Byte 1: low 4 bits of c2 + top 4 bits of c3.
o = (byte)((c2 & 0x0f) << 4);
o |= (c3 & 0x3c) >> 2;
rs.append((char)o);
if (++olen >= maxolen || off >= slen)
break;
c4 = char64(s.charAt(off++));
// Byte 2: low 2 bits of c3 + 6 bits of c4.
o = (byte)((c3 & 0x03) << 6);
o |= c4;
rs.append((char)o);
++olen;
}
ret = new byte[olen];
for (off = 0; off < olen; off++)
ret[off] = (byte)rs.charAt(off);
return ret;
}
/**
 * Blowfish encipher a single 64-bit block encoded as
 * two 32-bit halves. The loop performs the 16 Feistel rounds two at a
 * time (one substitution on each half per pass, consuming subkeys
 * P[1]..P[16]); the final two assignments apply the last subkey P[17]
 * and write the halves back swapped, undoing the final round's swap.
 * @param lr an array containing the two 32-bit half blocks
 * @param off the position in the array of the blocks
 */
private final void encipher(int lr[], int off) {
int i, n, l = lr[off], r = lr[off + 1];
l ^= P[0];
for (i = 0; i <= BLOWFISH_NUM_ROUNDS - 2;) {
// Feistel substitution on left word
// F(l) = ((S0[a] + S1[b]) ^ S2[c]) + S3[d], with the four S-boxes
// packed into one 1024-entry array at offsets 0x000/0x100/0x200/0x300.
n = S[(l >> 24) & 0xff];
n += S[0x100 | ((l >> 16) & 0xff)];
n ^= S[0x200 | ((l >> 8) & 0xff)];
n += S[0x300 | (l & 0xff)];
r ^= n ^ P[++i];
// Feistel substitution on right word
n = S[(r >> 24) & 0xff];
n += S[0x100 | ((r >> 16) & 0xff)];
n ^= S[0x200 | ((r >> 8) & 0xff)];
n += S[0x300 | (r & 0xff)];
l ^= n ^ P[++i];
}
lr[off] = r ^ P[BLOWFISH_NUM_ROUNDS + 1];
lr[off + 1] = l;
}
/**
 * Cyclically extract a word of key material. Reads four bytes
 * big-endian starting at the current offset, wrapping around to the
 * start of the array when the end is reached.
 *
 * @param data the string to extract the data from
 * @param offp a "pointer" (as a one-entry array) to the
 * current offset into data; updated to the position just past the
 * last byte consumed
 * @return the next word of material from data
 */
private static int streamtoword(byte data[], int offp[]) {
    int cursor = offp[0];
    int word = 0;
    for (int k = 0; k < 4; k++) {
        word = (word << 8) | (data[cursor] & 0xff);
        cursor = (cursor + 1) % data.length;
    }
    offp[0] = cursor;
    return word;
}
/**
 * Initialise the Blowfish key schedule by copying the canonical
 * P-array and S-box tables into the working arrays.
 */
private void init_key() {
    // int[].clone() is already typed int[] — no casts needed.
    P = P_orig.clone();
    S = S_orig.clone();
}
/**
* Key the Blowfish cipher
* @param key an array containing the key
*/
private void key(byte key[]) {
int i;
int koffp[] = { 0 };
int lr[] = { 0, 0 };
int plen = P.length, slen = S.length;
for (i = 0; i < plen; i++)
P[i] = P[i] ^ streamtoword(key, koffp);
for (i = 0; i < plen; i += 2) {
encipher(lr, 0);
P[i] = lr[0];
P[i + 1] = lr[1];
}
for (i = 0; i < slen; i += 2) {
encipher(lr, 0);
S[i] = lr[0];
S[i + 1] = lr[1];
}
}
/**
* Perform the "enhanced key schedule" step described by
* Provos and Mazieres in "A Future-Adaptable Password Scheme"
* http://www.openbsd.org/papers/bcrypt-paper.ps
* @param data salt information
* @param key password information
*/
private void ekskey(byte data[], byte key[]) {
int i;
int koffp[] = { 0 }, doffp[] = { 0 };
int lr[] = { 0, 0 };
int plen = P.length, slen = S.length;
for (i = 0; i < plen; i++)
P[i] = P[i] ^ streamtoword(key, koffp);
for (i = 0; i < plen; i += 2) {
lr[0] ^= streamtoword(data, doffp);
lr[1] ^= streamtoword(data, doffp);
encipher(lr, 0);
P[i] = lr[0];
P[i + 1] = lr[1];
}
for (i = 0; i < slen; i += 2) {
lr[0] ^= streamtoword(data, doffp);
lr[1] ^= streamtoword(data, doffp);
encipher(lr, 0);
S[i] = lr[0];
S[i + 1] = lr[1];
}
}
/**
* Perform the central password hashing step in the
* bcrypt scheme
* @param password the password to hash
* @param salt the binary salt to hash with the password
* @param log_rounds the binary logarithm of the number
* of rounds of hashing to apply
* @param cdata the plaintext to encrypt
* @return an array containing the binary hashed password
*/
public byte[] crypt_raw(byte password[], byte salt[], int log_rounds,
int cdata[]) {
int rounds, i, j;
int clen = cdata.length;
byte ret[];
if (log_rounds < 4 || log_rounds > 30)
throw new IllegalArgumentException ("Bad number of rounds");
rounds = 1 << log_rounds;
if (salt.length != BCRYPT_SALT_LEN)
throw new IllegalArgumentException ("Bad salt length");
init_key();
ekskey(salt, password);
for (i = 0; i != rounds; i++) {
key(password);
key(salt);
}
for (i = 0; i < 64; i++) {
for (j = 0; j < (clen >> 1); j++)
encipher(cdata, j << 1);
}
ret = new byte[clen * 4];
for (i = 0, j = 0; i < clen; i++) {
ret[j++] = (byte)((cdata[i] >> 24) & 0xff);
ret[j++] = (byte)((cdata[i] >> 16) & 0xff);
ret[j++] = (byte)((cdata[i] >> 8) & 0xff);
ret[j++] = (byte)(cdata[i] & 0xff);
}
return ret;
}
/**
* Hash a password using the OpenBSD bcrypt scheme
* @param password the password to hash
* @param salt the salt to hash with (perhaps generated
* using BCrypt.gensalt)
* @return the hashed password
*/
public static String hashpw(String password, String salt) {
BCrypt B;
String real_salt;
byte passwordb[], saltb[], hashed[];
char minor = (char)0;
int rounds, off = 0;
StringBuffer rs = new StringBuffer();
if (salt.charAt(0) != '$' || salt.charAt(1) != '2')
throw new IllegalArgumentException ("Invalid salt version");
if (salt.charAt(2) == '$')
off = 3;
else {
minor = salt.charAt(2);
if (minor != 'a' || salt.charAt(3) != '$')
throw new IllegalArgumentException ("Invalid salt revision");
off = 4;
}
// Extract number of rounds
if (salt.charAt(off + 2) > '$')
throw new IllegalArgumentException ("Missing salt rounds");
rounds = Integer.parseInt(salt.substring(off, off + 2));
real_salt = salt.substring(off + 3, off + 25);
try {
passwordb = (password + (minor >= 'a' ? "\000" : "")).getBytes("UTF-8");
} catch (UnsupportedEncodingException uee) {
throw new AssertionError("UTF-8 is not supported");
}
saltb = decode_base64(real_salt, BCRYPT_SALT_LEN);
B = new BCrypt();
hashed = B.crypt_raw(passwordb, saltb, rounds,
(int[])bf_crypt_ciphertext.clone());
rs.append("$2");
if (minor >= 'a')
rs.append(minor);
rs.append("$");
if (rounds < 10)
rs.append("0");
if (rounds > 30) {
throw new IllegalArgumentException(
"rounds exceeds maximum (30)");
}
rs.append(Integer.toString(rounds));
rs.append("$");
rs.append(encode_base64(saltb, saltb.length));
rs.append(encode_base64(hashed,
bf_crypt_ciphertext.length * 4 - 1));
return rs.toString();
}
/**
* Generate a salt for use with the BCrypt.hashpw() method
* @param log_rounds the log2 of the number of rounds of
* hashing to apply - the work factor therefore increases as
* 2**log_rounds.
* @param random an instance of SecureRandom to use
* @return an encoded salt value
*/
public static String gensalt(int log_rounds, SecureRandom random) {
StringBuffer rs = new StringBuffer();
byte rnd[] = new byte[BCRYPT_SALT_LEN];
random.nextBytes(rnd);
rs.append("$2a$");
if (log_rounds < 10)
rs.append("0");
if (log_rounds > 30) {
throw new IllegalArgumentException(
"log_rounds exceeds maximum (30)");
}
rs.append(Integer.toString(log_rounds));
rs.append("$");
rs.append(encode_base64(rnd, rnd.length));
return rs.toString();
}
/**
* Generate a salt for use with the BCrypt.hashpw() method
* @param log_rounds the log2 of the number of rounds of
* hashing to apply - the work factor therefore increases as
* 2**log_rounds.
* @return an encoded salt value
*/
public static String gensalt(int log_rounds) {
return gensalt(log_rounds, new SecureRandom());
}
/**
* Generate a salt for use with the BCrypt.hashpw() method,
* selecting a reasonable default for the number of hashing
* rounds to apply
* @return an encoded salt value
*/
public static String gensalt() {
return gensalt(GENSALT_DEFAULT_LOG2_ROUNDS);
}
/**
* Check that a plaintext password matches a previously hashed
* one
* @param plaintext the plaintext password to verify
* @param hashed the previously-hashed password
* @return true if the passwords match, false otherwise
*/
public static boolean checkpw(String plaintext, String hashed) {
byte hashed_bytes[];
byte try_bytes[];
try {
String try_pw = hashpw(plaintext, hashed);
hashed_bytes = hashed.getBytes("UTF-8");
try_bytes = try_pw.getBytes("UTF-8");
} catch (UnsupportedEncodingException uee) {
return false;
}
if (hashed_bytes.length != try_bytes.length)
return false;
byte ret = 0;
for (int i = 0; i < try_bytes.length; i++)
ret |= hashed_bytes[i] ^ try_bytes[i];
return ret == 0;
}
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java;
import static javax.tools.StandardLocation.CLASS_OUTPUT;
import com.facebook.buck.log.Logger;
import com.facebook.buck.zip.CustomZipEntry;
import com.facebook.buck.zip.JarBuilder;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URLEncoder;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.zip.ZipEntry;
import javax.tools.FileObject;
import javax.tools.ForwardingJavaFileManager;
import javax.tools.JavaFileObject;
import javax.tools.StandardJavaFileManager;
/**
 * A {@link StandardJavaFileManager} that creates and writes the content of files directly into a
 * Jar output stream instead of writing the files to disk.
 */
public class JavaInMemoryFileManager extends ForwardingJavaFileManager<StandardJavaFileManager>
    implements StandardJavaFileManager {
  private static final Logger LOG = Logger.get(JavaInMemoryFileManager.class);

  // Most filesystems refuse path segments longer than 255 characters; it is a
  // constant, so make it static final (it was a mutable instance field).
  private static final int FILENAME_LENGTH_LIMIT = 255;

  // All of these are assigned exactly once, in the constructor.
  private final Path jarPath;
  private final StandardJavaFileManager delegate;
  private final Set<String> directoryPaths;
  private final Map<String, JarFileObject> fileForOutputPaths;
  private final RemoveClassesPatternsMatcher classesToRemoveFromJar;

  public JavaInMemoryFileManager(
      StandardJavaFileManager standardManager,
      Path jarPath,
      RemoveClassesPatternsMatcher classesToRemoveFromJar) {
    super(standardManager);
    this.delegate = standardManager;
    this.jarPath = jarPath;
    this.directoryPaths = new HashSet<>();
    this.fileForOutputPaths = new HashMap<>();
    this.classesToRemoveFromJar = classesToRemoveFromJar;
  }

  /**
   * Creates a ZipEntry for placing in the jar output stream. Sets the modification time to 0 for a
   * deterministic jar.
   *
   * @param name the name of the entry
   * @return the zip entry for the file specified
   */
  public static ZipEntry createEntry(String name) {
    CustomZipEntry entry = new CustomZipEntry(name);
    // We want deterministic JARs, so avoid mtimes.
    entry.setFakeTime();
    return entry;
  }

  /** Converts a dotted class name to its slash-separated path form, e.g. "a.b.C" -> "a/b/C". */
  private static String getPath(String className) {
    return className.replace('.', '/');
  }

  /** Converts a dotted class name plus file kind to its path within the jar. */
  private static String getPath(String className, JavaFileObject.Kind kind) {
    return className.replace('.', '/') + kind.extension;
  }

  /** Joins a dotted package name and a relative file name into a jar path. */
  private static String getPath(String packageName, String relativeName) {
    return !packageName.isEmpty()
        ? packageName.replace('.', '/') + '/' + relativeName
        : relativeName;
  }

  @Override
  public JavaFileObject getJavaFileForOutput(
      Location location, String className, JavaFileObject.Kind kind, FileObject sibling)
      throws IOException {
    // Use the normal FileObject that writes to the disk for source files.
    if (shouldDelegate(location)) {
      return delegate.getJavaFileForOutput(location, className, kind, sibling);
    }
    String path = getPath(className, kind);
    // Check that the filename does not exceed the filesystem limit.
    if (Paths.get(path).getFileName().toString().length() > FILENAME_LENGTH_LIMIT) {
      throw new IOException(String.format("%s (File name too long)", path));
    }
    // If the class is to be removed from the Jar create a NoOp FileObject.
    if (classesToRemoveFromJar.shouldRemoveClass(className)) {
      LOG.info(
          "%s was excluded from the Jar because it matched a remove_classes pattern.",
          className);
      return getJavaNoOpFileObject(path, kind);
    }
    return getJavaMemoryFileObject(kind, path);
  }

  @Override
  public FileObject getFileForOutput(
      Location location, String packageName, String relativeName, FileObject sibling)
      throws IOException {
    if (shouldDelegate(location)) {
      return delegate.getFileForOutput(location, packageName, relativeName, sibling);
    }
    String path = getPath(packageName, relativeName);
    return getJavaMemoryFileObject(JavaFileObject.Kind.OTHER, path);
  }

  @Override
  public boolean isSameFile(FileObject a, FileObject b) {
    // In-memory files never alias on-disk files; two in-memory files are the
    // same iff they share a name.
    boolean aInMemoryJavaFileInstance = a instanceof JavaInMemoryFileObject;
    boolean bInMemoryJavaFileInstance = b instanceof JavaInMemoryFileObject;
    if (aInMemoryJavaFileInstance || bInMemoryJavaFileInstance) {
      return aInMemoryJavaFileInstance
          && bInMemoryJavaFileInstance
          && a.getName().equals(b.getName());
    }
    return super.isSameFile(a, b);
  }

  @Override
  public Iterable<? extends JavaFileObject> getJavaFileObjectsFromFiles(
      Iterable<? extends File> files) {
    return delegate.getJavaFileObjectsFromFiles(files);
  }

  @Override
  public Iterable<? extends JavaFileObject> getJavaFileObjects(File... files) {
    return delegate.getJavaFileObjects(files);
  }

  @Override
  public Iterable<? extends JavaFileObject> getJavaFileObjectsFromStrings(Iterable<String> names) {
    return delegate.getJavaFileObjectsFromStrings(names);
  }

  @Override
  public Iterable<? extends JavaFileObject> getJavaFileObjects(String... names) {
    return delegate.getJavaFileObjects(names);
  }

  @Override
  public void setLocation(Location location, Iterable<? extends File> path) throws IOException {
    delegate.setLocation(location, path);
  }

  @Override
  public Iterable<? extends File> getLocation(Location location) {
    return delegate.getLocation(location);
  }

  @Override
  public Iterable<JavaFileObject> list(
      Location location, String packageName, Set<JavaFileObject.Kind> kinds, boolean recurse)
      throws IOException {
    if (shouldDelegate(location)) {
      return delegate.list(location, packageName, kinds, recurse);
    }
    // Combine whatever the delegate knows about with the in-memory outputs.
    ArrayList<JavaFileObject> results = new ArrayList<>();
    for (JavaFileObject fromSuper : delegate.list(location, packageName, kinds, recurse)) {
      results.add(fromSuper);
    }
    String packageDirPath = getPath(packageName) + '/';
    for (String filepath : fileForOutputPaths.keySet()) {
      if (recurse && filepath.startsWith(packageDirPath)) {
        results.add(fileForOutputPaths.get(filepath));
      } else if (!recurse
          && filepath.startsWith(packageDirPath)
          // Non-recursive listing: only direct children of the package dir.
          && filepath.substring(packageDirPath.length()).indexOf('/') < 0) {
        results.add(fileForOutputPaths.get(filepath));
      }
    }
    return results;
  }

  /**
   * Writes all in-memory files into the given jar.
   *
   * @return the set of entry names written (directories and files).
   */
  public ImmutableSet<String> writeToJar(JarBuilder jarBuilder) throws IOException {
    for (JarFileObject fileObject : fileForOutputPaths.values()) {
      fileObject.writeToJar(jarBuilder, jarPath.toString());
    }
    return ImmutableSet.copyOf(Sets.union(directoryPaths, fileForOutputPaths.keySet()));
  }

  // Only class-output files are kept in memory; everything else goes to disk.
  // NOTE(review): this is a reference comparison; it assumes callers pass the
  // StandardLocation.CLASS_OUTPUT constant rather than an equivalent custom
  // Location — confirm against callers.
  private boolean shouldDelegate(Location location) {
    return location != CLASS_OUTPUT;
  }

  private JavaFileObject getJavaMemoryFileObject(JavaFileObject.Kind kind, String path)
      throws IOException {
    return fileForOutputPaths.computeIfAbsent(
        path, p -> new JavaInMemoryFileObject(getUriPath(p), p, kind));
  }

  private JavaFileObject getJavaNoOpFileObject(String path, JavaFileObject.Kind kind) {
    return fileForOutputPaths.computeIfAbsent(
        path, p -> new JavaNoOpFileObject(getUriPath(p), p, kind));
  }

  /** URL-encodes a path, keeping '/' separators literal. */
  private String encodeURL(String path) {
    try {
      return URLEncoder.encode(path, "UTF-8").replace("%2F", "/");
    } catch (UnsupportedEncodingException e) {
      // UTF-8 is guaranteed to be supported.
      throw new RuntimeException(e);
    }
  }

  private URI getUriPath(String relativePath) {
    return URI.create("jar:file:" + encodeURL(jarPath.toString()) + "!/" + encodeURL(relativePath));
  }
}
| |
package com.dacer.androidcharts;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.view.View;
import android.view.animation.AnimationUtils;
import java.util.ArrayList;
/**
 * A vertical bar-chart {@link View} with a simple frame-based animation:
 * each bar drifts from its current height toward a target height in 0.02
 * steps, re-posting itself every 20 ms until all bars have settled.
 *
 * Created by Dacer on 11/11/13.
 */
public class BarView extends View {
    // Current animated fill fraction per bar; note 0 = full bar, 1 = empty
    // (the fraction is the *top* offset of the foreground rect).
    private ArrayList<Float> percentList;
    // Target fill fractions the animator drifts percentList toward.
    private ArrayList<Float> targetPercentList;
    private Paint textPaint;
    private Paint bgPaint;
    private Paint fgPaint;
    // Scratch rect reused across onDraw calls to avoid per-frame allocation.
    private Rect rect;
    private int barWidth;
//    private boolean showSideMargin = true;
    // Descent of the tallest bottom label, used to baseline-align the text.
    private int bottomTextDescent;
    private boolean autoSetWidth = true;
    private int topMargin;
    private int bottomTextHeight;
    private ArrayList<String> bottomTextList = new ArrayList<String>();
    private final int MINI_BAR_WIDTH;
    private final int BAR_SIDE_MARGIN;
    private final int TEXT_TOP_MARGIN;
    private final int TEXT_COLOR = Color.parseColor("#9B9A9B");
    private final int BACKGROUND_COLOR = Color.parseColor("#F6F6F6");
    private final int FOREGROUND_COLOR = Color.parseColor("#FC496D");
    // One animation frame: nudge every bar 0.02 toward its target, snap when
    // within 0.02, and schedule the next frame (20 ms) while any bar moved.
    private Runnable animator = new Runnable() {
        @Override
        public void run() {
            boolean needNewFrame = false;
            for (int i=0; i<targetPercentList.size();i++) {
                if (percentList.get(i) < targetPercentList.get(i)) {
                    percentList.set(i,percentList.get(i)+0.02f);
                    needNewFrame = true;
                } else if (percentList.get(i) > targetPercentList.get(i)){
                    percentList.set(i,percentList.get(i)-0.02f);
                    needNewFrame = true;
                }
                // Snap to target when close enough to avoid oscillation.
                if(Math.abs(targetPercentList.get(i)-percentList.get(i))<0.02f){
                    percentList.set(i,targetPercentList.get(i));
                }
            }
            if (needNewFrame) {
                postDelayed(this, 20);
            }
            invalidate();
        }
    };
    public BarView(Context context){
        this(context,null);
    }
    public BarView(Context context, AttributeSet attrs){
        super(context, attrs);
        bgPaint = new Paint();
        bgPaint.setAntiAlias(true);
        bgPaint.setColor(BACKGROUND_COLOR);
        // Foreground paint inherits anti-aliasing from bgPaint.
        fgPaint = new Paint(bgPaint);
        fgPaint.setColor(FOREGROUND_COLOR);
        rect = new Rect();
        topMargin = MyUtils.dip2px(context, 5);
        int textSize = MyUtils.sp2px(context, 15);
        barWidth = MyUtils.dip2px(context,22);
        MINI_BAR_WIDTH = MyUtils.dip2px(context,22);
        BAR_SIDE_MARGIN = MyUtils.dip2px(context,22);
        TEXT_TOP_MARGIN = MyUtils.dip2px(context, 5);
        textPaint = new Paint();
        textPaint.setAntiAlias(true);
        textPaint.setColor(TEXT_COLOR);
        textPaint.setTextSize(textSize);
        textPaint.setTextAlign(Paint.Align.CENTER);
        percentList = new ArrayList<Float>();
    }
    /**
     * Sets the labels drawn under the bars and recomputes the text metrics
     * (height, descent) and, when autoSetWidth is on, widens bars to fit the
     * widest label.
     * @param bottomStringList The String ArrayList in the bottom.
     */
    public void setBottomTextList(ArrayList<String> bottomStringList){
//        this.dataList = null;
        this.bottomTextList = bottomStringList;
        Rect r = new Rect();
        bottomTextDescent = 0;
        barWidth = MINI_BAR_WIDTH;
        for(String s:bottomTextList){
            textPaint.getTextBounds(s,0,s.length(),r);
            // NOTE(review): bottomTextHeight is never reset here, so it can
            // only grow across successive calls — confirm this is intended.
            if(bottomTextHeight<r.height()){
                bottomTextHeight = r.height();
            }
            if(autoSetWidth&&(barWidth<r.width())){
                barWidth = r.width();
            }
            if(bottomTextDescent<(Math.abs(r.bottom))){
                bottomTextDescent = Math.abs(r.bottom);
            }
        }
        setMinimumWidth(2);
        postInvalidate();
    }
    /**
     * Sets the data and starts animating each bar toward its new height.
     * Values are normalised against max; a zero max is treated as 1 to
     * avoid division by zero.
     * @param list The ArrayList of Integer with the range of [0-max].
     * @param max the value corresponding to a full-height bar
     */
    public void setDataList(ArrayList<Integer> list, int max){
        targetPercentList = new ArrayList<Float>();
        if(max == 0) max = 1;
        for(Integer integer : list){
            // Stored as the bar's *empty* fraction from the top.
            targetPercentList.add(1-(float)integer/(float)max);
        }
        // Make sure percentList.size() == targetPercentList.size()
        if(percentList.isEmpty() || percentList.size()<targetPercentList.size()){
            int temp = targetPercentList.size()-percentList.size();
            for(int i=0; i<temp;i++){
                // New bars start empty (fraction 1) and animate upward.
                percentList.add(1f);
            }
        } else if (percentList.size()>targetPercentList.size()){
            int temp = percentList.size()-targetPercentList.size();
            for(int i=0; i<temp;i++){
                percentList.remove(percentList.size()-1);
            }
        }
        setMinimumWidth(2);
        // Restart the animation from the current state.
        removeCallbacks(animator);
        post(animator);
    }
    @Override
    protected void onDraw(Canvas canvas) {
        // Bars are drawn 1-indexed so each bar sits after i side margins.
        int i = 1;
        if(percentList != null && !percentList.isEmpty()){
            for(Float f:percentList){
                // Full-height background track for this bar.
                rect.set(BAR_SIDE_MARGIN*i+barWidth*(i-1),
                        topMargin,
                        (BAR_SIDE_MARGIN+barWidth)* i,
                        getHeight()-bottomTextHeight-TEXT_TOP_MARGIN);
                canvas.drawRect(rect,bgPaint);
                /*rect.set(BAR_SIDE_MARGIN*i+barWidth*(i-1),
                        topMargin+(int)((getHeight()-topMargin)*percentList.get(i-1)),
                        (BAR_SIDE_MARGIN+barWidth)* i,
                        getHeight()-bottomTextHeight-TEXT_TOP_MARGIN);*/
                /**
                 * The correct total height is "getHeight()-topMargin-bottomTextHeight-TEXT_TOP_MARGIN",not "getHeight()-topMargin".
                 * fix by zhenghuiy@gmail.com on 11/11/13.
                 */
                // Foreground bar: top edge offset by the animated fraction.
                rect.set(BAR_SIDE_MARGIN*i+barWidth*(i-1),
                        topMargin+(int)((getHeight()-topMargin-bottomTextHeight-TEXT_TOP_MARGIN)*percentList.get(i-1)),
                        (BAR_SIDE_MARGIN+barWidth)* i,
                        getHeight()-bottomTextHeight-TEXT_TOP_MARGIN);
                canvas.drawRect(rect,fgPaint);
                i++;
            }
        }
        if(bottomTextList != null && !bottomTextList.isEmpty()){
            i = 1;
            for(String s:bottomTextList){
                // Labels are centre-aligned under each bar (Align.CENTER).
                canvas.drawText(s,BAR_SIDE_MARGIN*i+barWidth*(i-1)+barWidth/2,
                        getHeight()-bottomTextDescent,textPaint);
                i++;
            }
        }
    }
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        int mViewWidth = measureWidth(widthMeasureSpec);
        int mViewHeight = measureHeight(heightMeasureSpec);
        setMeasuredDimension(mViewWidth,mViewHeight);
    }
    // Preferred width: one bar plus one side margin per label.
    private int measureWidth(int measureSpec){
        int preferred = 0;
        if(bottomTextList != null){
            preferred = bottomTextList.size()*(barWidth+BAR_SIDE_MARGIN);
        }
        return getMeasurement(measureSpec, preferred);
    }
    // Preferred height is a fixed 222 px when unconstrained.
    private int measureHeight(int measureSpec){
        int preferred = 222;
        return getMeasurement(measureSpec, preferred);
    }
    // Standard MeasureSpec resolution: EXACTLY wins, AT_MOST caps preferred.
    private int getMeasurement(int measureSpec, int preferred){
        int specSize = MeasureSpec.getSize(measureSpec);
        int measurement;
        switch(MeasureSpec.getMode(measureSpec)){
            case MeasureSpec.EXACTLY:
                measurement = specSize;
                break;
            case MeasureSpec.AT_MOST:
                measurement = Math.min(preferred, specSize);
                break;
            default:
                measurement = preferred;
                break;
        }
        return measurement;
    }
}
| |
/*
* Copyright (C) 2011-2012 Dr. John Lindsay <jlindsay@uoguelph.ca>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package plugins;
import java.util.Date;
import java.util.Random;
import whitebox.geospatialfiles.WhiteboxRaster;
import whitebox.interfaces.WhiteboxPlugin;
import whitebox.interfaces.WhiteboxPluginHost;
/**
* This tool can be used to create a random field using the turning bands algorithm.
*
* @author Dr. John Lindsay email: jlindsay@uoguelph.ca
*/
public class TurningBands implements WhiteboxPlugin {
private WhiteboxPluginHost myHost = null;
private String[] args;
/**
* Used to retrieve the plugin tool's name. This is a short, unique name
* containing no spaces.
*
* @return String containing plugin name.
*/
@Override
public String getName() {
return "TurningBands";
}
/**
* Used to retrieve the plugin tool's descriptive name. This can be a longer
* name (containing spaces) and is used in the interface to list the tool.
*
* @return String containing the plugin descriptive name.
*/
@Override
public String getDescriptiveName() {
return "Turning Bands Simulation";
}
/**
* Used to retrieve a short description of what the plugin tool does.
*
* @return String containing the plugin's description.
*/
@Override
public String getToolDescription() {
return "This tool implements a turning bands simulation for random grid generation.";
}
/**
* Used to identify which toolboxes this plugin tool should be listed in.
*
* @return Array of Strings.
*/
@Override
public String[] getToolbox() {
String[] ret = { "StatisticalTools", "RasterCreation" };
return ret;
}
/**
* Sets the WhiteboxPluginHost to which the plugin tool is tied. This is the
* class that the plugin will send all feedback messages, progress updates,
* and return objects.
*
* @param host The WhiteboxPluginHost that called the plugin tool.
*/
@Override
public void setPluginHost(WhiteboxPluginHost host) {
myHost = host;
}
/**
* Used to communicate feedback pop-up messages between a plugin tool and
* the main Whitebox user-interface.
*
* @param feedback String containing the text to display.
*/
private void showFeedback(String message) {
if (myHost != null) {
myHost.showFeedback(message);
} else {
System.out.println(message);
}
}
/**
* Used to communicate a return object from a plugin tool to the main
* Whitebox user-interface.
*
* @return Object, such as an output WhiteboxRaster.
*/
private void returnData(Object ret) {
if (myHost != null) {
myHost.returnData(ret);
}
}
private int previousProgress = 0;
private String previousProgressLabel = "";
/**
* Used to communicate a progress update between a plugin tool and the main
* Whitebox user interface.
*
* @param progressLabel A String to use for the progress label.
* @param progress Float containing the progress value (between 0 and 100).
*/
private void updateProgress(String progressLabel, int progress) {
if (myHost != null && ((progress != previousProgress) ||
(!progressLabel.equals(previousProgressLabel)))) {
myHost.updateProgress(progressLabel, progress);
}
previousProgress = progress;
previousProgressLabel = progressLabel;
}
/**
* Used to communicate a progress update between a plugin tool and the main
* Whitebox user interface.
*
* @param progress Float containing the progress value (between 0 and 100).
*/
private void updateProgress(int progress) {
if (myHost != null && progress != previousProgress) {
myHost.updateProgress(progress);
}
previousProgress = progress;
}
/**
* Sets the arguments (parameters) used by the plugin.
*
* @param args An array of string arguments.
*/
@Override
public void setArgs(String[] args) {
this.args = args.clone();
}
private boolean cancelOp = false;
/**
* Used to communicate a cancel operation from the Whitebox GUI.
*
* @param cancel Set to true if the plugin should be canceled.
*/
@Override
public void setCancelOp(boolean cancel) {
cancelOp = cancel;
}
    // Reports the cancellation to the user and resets the progress bar; the
    // feedback is shown first so the progress reset does not hide it.
    private void cancelOperation() {
        showFeedback("Operation cancelled.");
        updateProgress("Progress: ", 0);
    }
private boolean amIActive = false;
/**
* Used by the Whitebox GUI to tell if this plugin is still running.
*
* @return a boolean describing whether or not the plugin is actively being
* used.
*/
@Override
public boolean isActive() {
return amIActive;
}
/**
* Used to execute this plugin tool.
*/
@Override
public void run() {
amIActive = true;
float progress = 0;
String inputHeader = null;
String outputHeader = null;
double range = 0;
double sill = 0;
double nugget = 0;
int numIterations = 1000;
boolean fastMode = false;
if (args.length <= 0) {
showFeedback("Plugin parameters have not been set.");
return;
}
for (int i = 0; i < args.length; i++) {
if (i == 0) {
inputHeader = args[i];
} else if (i == 1) {
outputHeader = args[i];
} else if (i == 2) {
range = Double.parseDouble(args[i]);
} else if (i == 3) {
numIterations = Integer.parseInt(args[i]);
} else if (i == 4) {
fastMode = Boolean.parseBoolean(args[i]);
}
}
// check to see that the inputHeader and outputHeader are not null.
if ((inputHeader == null) || (outputHeader == null)) {
showFeedback("One or more of the input parameters have not been set properly.");
return;
}
try {
int row, col;
int i, j, k, m, n;
int edge1, edge2;
double pnt1x = 0, pnt1y = 0, pnt2x = 0, pnt2y = 0;
double z;
int diagonalSize = 0;
Random generator = new Random(); //74657382);
WhiteboxRaster image = new WhiteboxRaster(inputHeader, "r");
double noData = image.getNoDataValue();
int rows = image.getNumberRows();
int cols = image.getNumberColumns();
diagonalSize = (int) (Math.sqrt(rows * rows + cols * cols));
int filterHalfSize = (int) (range / (2 * image.getCellSizeX()));
int filterSize = filterHalfSize * 2 + 1;
int[] cellOffsets = new int[filterSize];
for (i = 0; i < filterSize; i++) {
cellOffsets[i] = i - filterHalfSize;
}
double w = Math.sqrt(36d / (filterHalfSize * (filterHalfSize + 1) * filterSize));
// create the new output grid.
WhiteboxRaster outputFile = new WhiteboxRaster(outputHeader, "rw", inputHeader, WhiteboxRaster.DataType.FLOAT, 0);
outputFile.setPreferredPalette("blue_white_red.pal");
if (!fastMode) {
// loop through the number of iterations
updateProgress("Loop 1 of 2: ", 0);
for (i = 0; i < numIterations; i++) {
// create the data line and fill it with random numbers.
// notice that the initial dataline is 2 * filterHalfSize larger
// because of the edge effects of the filter.
double[] T = new double[diagonalSize + 2 * filterHalfSize];
for (j = 0; j < diagonalSize; j++) {
T[j] = generator.nextGaussian();
}
double[] y = new double[diagonalSize];
// filter the line
for (j = 0; j < diagonalSize; j++) {
z = 0;
for (k = 0; k < filterSize; k++) {
m = cellOffsets[k];
z += m * T[j + filterHalfSize + m];
}
y[j] = w * z;
}
//dataLine = new double[-1];
// assign the spatially autocorrelated data line an equation of a transect of the grid
// first, pick two points on different edges of the grid at random.
// Edges are as follows 0 = left, 1 = top, 2 = right, and 3 = bottom
edge1 = generator.nextInt(4);
edge2 = edge1;
do {
edge2 = generator.nextInt(4);
} while (edge2 == edge1);
switch (edge1) {
case 0:
pnt1x = 0;
pnt1y = generator.nextDouble() * (rows - 1);
break;
case 1:
pnt1x = generator.nextDouble() * (cols - 1);
pnt1y = 0;
break;
case 2:
pnt1x = cols - 1;
pnt1y = generator.nextDouble() * (rows - 1);
break;
case 3:
pnt1x = generator.nextDouble() * (cols - 1);
pnt1y = rows - 1;
break;
}
switch (edge2) {
case 0:
pnt2x = 0;
pnt2y = generator.nextDouble() * (rows - 1);
break;
case 1:
pnt2x = generator.nextDouble() * (cols - 1);
pnt2y = 0;
break;
case 2:
pnt2x = cols - 1;
pnt2y = generator.nextDouble() * (rows - 1);
break;
case 3:
pnt2x = generator.nextDouble() * (cols - 1);
pnt2y = rows - 1;
break;
}
if (pnt1x == pnt2x || pnt1y == pnt2y) {
do {
switch (edge2) {
case 0:
pnt2x = 0;
pnt2y = generator.nextDouble() * (rows - 1);
break;
case 1:
pnt2x = generator.nextDouble() * (cols - 1);
pnt2y = 0;
break;
case 2:
pnt2x = cols - 1;
pnt2y = generator.nextDouble() * (rows - 1);
break;
case 3:
pnt2x = generator.nextDouble() * (cols - 1);
pnt2y = rows - 1;
break;
}
} while (pnt1x == pnt2x || pnt1y == pnt2y);
}
double lineSlope = (pnt2y - pnt1y) / (pnt2x - pnt1x);
double lineIntercept = pnt1y - lineSlope * pnt1x;
double perpendicularLineSlope = -1 / lineSlope;
double slopeDiff = (lineSlope - perpendicularLineSlope);
double perpendicularLineIntercept = 0;
double intersectingPointX, intersectingPointY;
// for each of the four corners, figure out what the perpendicular line
// intersection coordinates would be.
// point (0,0)
perpendicularLineIntercept = 0;
double corner1X = (perpendicularLineIntercept - lineIntercept) / slopeDiff;
double corner1Y = lineSlope * corner1X - lineIntercept;
// point (0,cols)
row = 0;
col = cols;
perpendicularLineIntercept = row - perpendicularLineSlope * col;;
double corner2X = (perpendicularLineIntercept - lineIntercept) / slopeDiff;
double corner2Y = lineSlope * corner2X - lineIntercept;
// point (rows,0)
row = rows;
col = 0;
perpendicularLineIntercept = row - perpendicularLineSlope * col;;
double corner3X = (perpendicularLineIntercept - lineIntercept) / slopeDiff;
double corner3Y = lineSlope * corner3X - lineIntercept;
// point (rows,cols)
row = rows;
col = cols;
perpendicularLineIntercept = row - perpendicularLineSlope * col;;
double corner4X = (perpendicularLineIntercept - lineIntercept) / slopeDiff;
double corner4Y = lineSlope * corner4X - lineIntercept;
// find the point with the minimum Y value and set it as the line starting point
double lineStartX, lineStartY;
lineStartX = corner1X;
lineStartY = corner1Y;
if (corner2Y < lineStartY) {
lineStartX = corner2X;
lineStartY = corner2Y;
}
if (corner3Y < lineStartY) {
lineStartX = corner3X;
lineStartY = corner3Y;
}
if (corner4Y < lineStartY) {
lineStartX = corner4X;
lineStartY = corner4Y;
}
// scan through each grid cell and assign it the closest value on the line segment
for (row = 0; row < rows; row++) {
for (col = 0; col < cols; col++) {
perpendicularLineIntercept = row - perpendicularLineSlope * col;
intersectingPointX = (perpendicularLineIntercept - lineIntercept) / slopeDiff;
intersectingPointY = lineSlope * intersectingPointX - lineIntercept;
int p = (int) (Math.sqrt((intersectingPointX - lineStartX) * (intersectingPointX - lineStartX)
+ (intersectingPointY - lineStartY) * (intersectingPointY - lineStartY)));
if (p < 0) {
p = 0;
}
if (p > (diagonalSize - 1)) {
p = diagonalSize - 1;
}
z = outputFile.getValue(row, col) + y[p];
outputFile.setValue(row, col, z);
}
}
// check for a cancellation of the operation.
if (cancelOp) {
cancelOperation();
return;
}
// update the progress.
progress = (float) (i * 100f / numIterations);
updateProgress("Loop 1 of 2: ", (int) progress);
}
updateProgress("Loop 2 of 2: ", 0);
//double rootNumIterations = Math.sqrt(numIterations);
double value;
for (row = 0; row < rows; row++) {
for (col = 0; col < cols; col++) {
z = outputFile.getValue(row, col);
value = (float) (z / numIterations);
outputFile.setValue(row, col, value);
}
if (cancelOp) {
cancelOperation();
return;
}
progress = (float) (100f * row / rows);
updateProgress("Loop 2 of 2: ", (int) progress);
}
} else {
double[][] output = new double[rows][cols];
// loop through the number of iterations
updateProgress("Loop 1 of 2: ", 0);
for (i = 0; i < numIterations; i++) {
// create the data line and fill it with random numbers.
// notice that the initial dataline is 2 * filterHalfSize larger
// because of the edge effects of the filter.
double[] T = new double[diagonalSize + 2 * filterHalfSize];
for (j = 0; j < diagonalSize; j++) {
T[j] = generator.nextGaussian();
}
double[] y = new double[diagonalSize];
// filter the line
for (j = 0; j < diagonalSize; j++) {
z = 0;
for (k = 0; k < filterSize; k++) {
m = cellOffsets[k];
z += m * T[j + filterHalfSize + m];
}
y[j] = w * z;
}
//dataLine = new double[-1];
// assign the spatially autocorrelated data line an equation of a transect of the grid
// first, pick two points on different edges of the grid at random.
// Edges are as follows 0 = left, 1 = top, 2 = right, and 3 = bottom
edge1 = generator.nextInt(4);
edge2 = edge1;
do {
edge2 = generator.nextInt(4);
} while (edge2 == edge1);
switch (edge1) {
case 0:
pnt1x = 0;
pnt1y = generator.nextDouble() * (rows - 1);
break;
case 1:
pnt1x = generator.nextDouble() * (cols - 1);
pnt1y = 0;
break;
case 2:
pnt1x = cols - 1;
pnt1y = generator.nextDouble() * (rows - 1);
break;
case 3:
pnt1x = generator.nextDouble() * (cols - 1);
pnt1y = rows - 1;
break;
}
switch (edge2) {
case 0:
pnt2x = 0;
pnt2y = generator.nextDouble() * (rows - 1);
break;
case 1:
pnt2x = generator.nextDouble() * (cols - 1);
pnt2y = 0;
break;
case 2:
pnt2x = cols - 1;
pnt2y = generator.nextDouble() * (rows - 1);
break;
case 3:
pnt2x = generator.nextDouble() * (cols - 1);
pnt2y = rows - 1;
break;
}
if (pnt1x == pnt2x || pnt1y == pnt2y) {
do {
switch (edge2) {
case 0:
pnt2x = 0;
pnt2y = generator.nextDouble() * (rows - 1);
break;
case 1:
pnt2x = generator.nextDouble() * (cols - 1);
pnt2y = 0;
break;
case 2:
pnt2x = cols - 1;
pnt2y = generator.nextDouble() * (rows - 1);
break;
case 3:
pnt2x = generator.nextDouble() * (cols - 1);
pnt2y = rows - 1;
break;
}
} while (pnt1x == pnt2x || pnt1y == pnt2y);
}
double lineSlope = (pnt2y - pnt1y) / (pnt2x - pnt1x);
double lineIntercept = pnt1y - lineSlope * pnt1x;
double perpendicularLineSlope = -1 / lineSlope;
double slopeDiff = (lineSlope - perpendicularLineSlope);
double perpendicularLineIntercept = 0;
double intersectingPointX, intersectingPointY;
// for each of the four corners, figure out what the perpendicular line
// intersection coordinates would be.
// point (0,0)
perpendicularLineIntercept = 0;
double corner1X = (perpendicularLineIntercept - lineIntercept) / slopeDiff;
double corner1Y = lineSlope * corner1X - lineIntercept;
// point (0,cols)
row = 0;
col = cols;
perpendicularLineIntercept = row - perpendicularLineSlope * col;;
double corner2X = (perpendicularLineIntercept - lineIntercept) / slopeDiff;
double corner2Y = lineSlope * corner2X - lineIntercept;
// point (rows,0)
row = rows;
col = 0;
perpendicularLineIntercept = row - perpendicularLineSlope * col;;
double corner3X = (perpendicularLineIntercept - lineIntercept) / slopeDiff;
double corner3Y = lineSlope * corner3X - lineIntercept;
// point (rows,cols)
row = rows;
col = cols;
perpendicularLineIntercept = row - perpendicularLineSlope * col;;
double corner4X = (perpendicularLineIntercept - lineIntercept) / slopeDiff;
double corner4Y = lineSlope * corner4X - lineIntercept;
// find the point with the minimum Y value and set it as the line starting point
double lineStartX, lineStartY;
lineStartX = corner1X;
lineStartY = corner1Y;
if (corner2Y < lineStartY) {
lineStartX = corner2X;
lineStartY = corner2Y;
}
if (corner3Y < lineStartY) {
lineStartX = corner3X;
lineStartY = corner3Y;
}
if (corner4Y < lineStartY) {
lineStartX = corner4X;
lineStartY = corner4Y;
}
// scan through each grid cell and assign it the closest value on the line segment
for (row = 0; row < rows; row++) {
for (col = 0; col < cols; col++) {
perpendicularLineIntercept = row - perpendicularLineSlope * col;
intersectingPointX = (perpendicularLineIntercept - lineIntercept) / slopeDiff;
intersectingPointY = lineSlope * intersectingPointX - lineIntercept;
int p = (int) (Math.sqrt((intersectingPointX - lineStartX) * (intersectingPointX - lineStartX)
+ (intersectingPointY - lineStartY) * (intersectingPointY - lineStartY)));
if (p < 0) {
p = 0;
}
if (p > (diagonalSize - 1)) {
p = diagonalSize - 1;
}
output[row][col] += y[p];
}
}
// check for a cancellation of the operation.
if (cancelOp) {
cancelOperation();
return;
}
// update the progress.
progress = (float) (i * 100f / numIterations);
updateProgress("Loop 1 of 2: ", (int) progress);
}
updateProgress("Loop 2 of 2: ", 0);
//double rootNumIterations = Math.sqrt(numIterations);
double value;
for (row = 0; row < rows; row++) {
for (col = 0; col < cols; col++) {
value = (float) (output[row][col] / numIterations);
outputFile.setValue(row, col, value);
}
if (cancelOp) {
cancelOperation();
return;
}
progress = (float) (100f * row / rows);
updateProgress("Loop 2 of 2: ", (int) progress);
}
}
outputFile.addMetadataEntry("Created by the "
+ getDescriptiveName() + " tool.");
outputFile.addMetadataEntry("Created on " + new Date());
image.close();
outputFile.close();
// returning a header file string displays the image.
returnData(outputHeader);
} catch (OutOfMemoryError oe) {
myHost.showFeedback("An out-of-memory error has occurred during operation.");
} catch (Exception e) {
myHost.showFeedback("An error has occurred during operation. See log file for details.");
myHost.logException("Error in " + getDescriptiveName(), e);
} finally {
updateProgress("Progress: ", 0);
// tells the main application that this process is completed.
amIActive = false;
myHost.pluginComplete();
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.query.continuous;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import javax.cache.configuration.FactoryBuilder;
import javax.cache.event.CacheEntryEvent;
import javax.cache.event.CacheEntryListenerException;
import javax.cache.event.CacheEntryUpdatedListener;
import javax.cache.processor.EntryProcessorException;
import javax.cache.processor.MutableEntry;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheEntryEventSerializableFilter;
import org.apache.ignite.cache.CacheEntryProcessor;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.cache.query.ContinuousQuery;
import org.apache.ignite.cache.query.QueryCursor;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.PA;
import org.apache.ignite.internal.util.typedef.T2;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.lang.IgniteAsyncCallback;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.spi.eventstorage.memory.MemoryEventStorageSpi;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.transactions.Transaction;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheMode.REPLICATED;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.PRIMARY_SYNC;
/**
*
*/
public class CacheContinuousQueryOperationFromCallbackTest extends GridCommonAbstractTest {
    /** Number of keys that the test threads update directly. */
    public static final int KEYS = 10;
    /** Number of additional keys updated from inside listener/filter callbacks. */
    public static final int KEYS_FROM_CALLBACK = 20;
    /** Shared IP finder for the test topology. */
    private static TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);
    /** Total node count; the last node is started in client mode. */
    private static final int NODES = 5;
    /** Number of cache updates performed by each put-thread. */
    public static final int ITERATION_CNT = 20;
    /** System thread pool size (the test starts twice as many put-threads). */
    public static final int SYSTEM_POOL_SIZE = 10;
    /** When {@code true} the next grid started via {@link #getConfiguration} is a client. */
    private boolean client;
    /** Counts remote filter invocations for keys below {@link #KEYS}. */
    private static AtomicInteger filterCbCntr = new AtomicInteger(0);
    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
        cfg.setSystemThreadPoolSize(SYSTEM_POOL_SIZE);
        ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setIpFinder(ipFinder);
        // Disable the shared-memory endpoint so all communication goes over TCP.
        ((TcpCommunicationSpi)cfg.getCommunicationSpi()).setSharedMemoryPort(-1);
        cfg.setClientMode(client);
        // Bounded in-memory event storage so events do not accumulate without limit.
        MemoryEventStorageSpi storeSpi = new MemoryEventStorageSpi();
        storeSpi.setExpireCount(100);
        cfg.setEventStorageSpi(storeSpi);
        return cfg;
    }
    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        super.beforeTestsStarted();
        // Start NODES - 1 server nodes, then one client node.
        startGridsMultiThreaded(NODES - 1);
        client = true;
        startGrid(NODES - 1);
    }
    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids();
        super.afterTestsStopped();
    }
    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        super.beforeTest();
        filterCbCntr.set(0);
    }
    /**
     * Atomic partitioned cache, 2 backups, callback updates from the local listener.
     *
     * @throws Exception If failed.
     */
    public void testAtomicTwoBackups() throws Exception {
        CacheConfiguration<Object, Object> ccfg = cacheConfiguration(PARTITIONED, 2, ATOMIC, FULL_SYNC);
        doTest(ccfg, true);
    }
    /**
     * Transactional partitioned cache, 2 backups, callback updates from the remote filter.
     *
     * @throws Exception If failed.
     */
    public void testTxTwoBackupsFilter() throws Exception {
        CacheConfiguration<Object, Object> ccfg = cacheConfiguration(PARTITIONED, 2, TRANSACTIONAL, FULL_SYNC);
        doTest(ccfg, false);
    }
    /**
     * Same as {@link #testTxTwoBackupsFilter} but with PRIMARY_SYNC write mode.
     *
     * @throws Exception If failed.
     */
    public void testTxTwoBackupsFilterPrimary() throws Exception {
        CacheConfiguration<Object, Object> ccfg = cacheConfiguration(PARTITIONED, 2, TRANSACTIONAL, PRIMARY_SYNC);
        doTest(ccfg, false);
    }
    /**
     * Transactional replicated cache, callback updates from the remote filter.
     *
     * @throws Exception If failed.
     */
    public void testTxReplicatedFilter() throws Exception {
        CacheConfiguration<Object, Object> ccfg = cacheConfiguration(REPLICATED, 0, TRANSACTIONAL, FULL_SYNC);
        doTest(ccfg, false);
    }
    /**
     * Transactional partitioned cache, 2 backups, callback updates from the local listener.
     *
     * @throws Exception If failed.
     */
    public void testTxTwoBackup() throws Exception {
        CacheConfiguration<Object, Object> ccfg = cacheConfiguration(PARTITIONED, 2, TRANSACTIONAL, FULL_SYNC);
        doTest(ccfg, true);
    }
    /**
     * Transactional replicated cache, callback updates from the local listener.
     * (The backups argument is ignored for REPLICATED mode, see {@link #cacheConfiguration}.)
     *
     * @throws Exception If failed.
     */
    public void testTxReplicated() throws Exception {
        CacheConfiguration<Object, Object> ccfg = cacheConfiguration(REPLICATED, 2, TRANSACTIONAL, FULL_SYNC);
        doTest(ccfg, true);
    }
    /**
     * Same as {@link #testTxReplicated} but with PRIMARY_SYNC write mode.
     *
     * @throws Exception If failed.
     */
    public void testTxReplicatedPrimary() throws Exception {
        CacheConfiguration<Object, Object> ccfg = cacheConfiguration(REPLICATED, 2, TRANSACTIONAL, PRIMARY_SYNC);
        doTest(ccfg, true);
    }
    /**
     * Registers a continuous query on every node, updates random keys from many threads and
     * verifies that both the directly generated events and the events produced by cache
     * operations issued from inside the async callbacks (listener or remote filter) arrive.
     *
     * @param ccfg Cache configuration.
     * @param fromLsnr {@code true} to issue callback updates from the local listener,
     *      {@code false} to issue them from the remote filter.
     * @throws Exception If failed.
     */
    protected void doTest(final CacheConfiguration ccfg, boolean fromLsnr) throws Exception {
        ignite(0).createCache(ccfg);
        List<QueryCursor<?>> qries = new ArrayList<>();
        assertEquals(0, filterCbCntr.get());
        try {
            List<Set<T2<QueryTestKey, QueryTestValue>>> rcvdEvts = new ArrayList<>(NODES);
            List<Set<T2<QueryTestKey, QueryTestValue>>> evtsFromCallbacks = new ArrayList<>(NODES);
            final AtomicInteger qryCntr = new AtomicInteger(0);
            final AtomicInteger cbCntr = new AtomicInteger(0);
            final int threadCnt = SYSTEM_POOL_SIZE * 2;
            // Register one continuous query per node.
            for (int idx = 0; idx < NODES; idx++) {
                Set<T2<QueryTestKey, QueryTestValue>> evts = Collections.
                    newSetFromMap(new ConcurrentHashMap<T2<QueryTestKey, QueryTestValue>, Boolean>());
                Set<T2<QueryTestKey, QueryTestValue>> evtsFromCb = Collections.
                    newSetFromMap(new ConcurrentHashMap<T2<QueryTestKey, QueryTestValue>, Boolean>());
                IgniteCache<Object, Object> cache = grid(idx).getOrCreateCache(ccfg.getName());
                ContinuousQuery qry = new ContinuousQuery();
                qry.setLocalListener(new TestCacheAsyncEventListener(evts, evtsFromCb,
                    fromLsnr ? cache : null, qryCntr, cbCntr));
                if (!fromLsnr)
                    qry.setRemoteFilterFactory(
                        FactoryBuilder.factoryOf(new CacheTestRemoteFilterAsync(ccfg.getName())));
                rcvdEvts.add(evts);
                evtsFromCallbacks.add(evtsFromCb);
                QueryCursor qryCursor = cache.query(qry);
                qries.add(qryCursor);
            }
            // Concurrently update random direct keys via invoke() or CAS replace().
            IgniteInternalFuture<Long> f = GridTestUtils.runMultiThreadedAsync(new Runnable() {
                @Override public void run() {
                    ThreadLocalRandom rnd = ThreadLocalRandom.current();
                    for (int i = 0; i < ITERATION_CNT; i++) {
                        IgniteCache<QueryTestKey, QueryTestValue> cache =
                            grid(rnd.nextInt(NODES)).cache(ccfg.getName());
                        QueryTestKey key = new QueryTestKey(rnd.nextInt(KEYS));
                        boolean startTx = cache.getConfiguration(CacheConfiguration.class).getAtomicityMode() ==
                            TRANSACTIONAL && rnd.nextBoolean();
                        Transaction tx = null;
                        if (startTx)
                            tx = cache.unwrap(Ignite.class).transactions().txStart();
                        try {
                            if ((cache.get(key) == null) || rnd.nextBoolean())
                                cache.invoke(key, new IncrementTestEntryProcessor());
                            else {
                                QueryTestValue val;
                                QueryTestValue newVal;
                                do {
                                    val = cache.get(key);
                                    newVal = val == null ?
                                        new QueryTestValue(0) : new QueryTestValue(val.val1 + 1);
                                }
                                while (!cache.replace(key, val, newVal));
                            }
                        }
                        finally {
                            // NOTE(review): the transaction is committed even if the update
                            // above threw — confirm this best-effort commit is intentional.
                            if (tx != null)
                                tx.commit();
                        }
                    }
                }
            }, threadCnt, "put-thread");
            f.get(30, TimeUnit.SECONDS);
            // Every node's listener must observe every direct update.
            assert GridTestUtils.waitForCondition(new PA() {
                @Override public boolean apply() {
                    return qryCntr.get() >= ITERATION_CNT * threadCnt * NODES;
                }
            }, TimeUnit.MINUTES.toMillis(2));
            for (Set<T2<QueryTestKey, QueryTestValue>> set : rcvdEvts)
                checkEvents(set, ITERATION_CNT * threadCnt, grid(0).cache(ccfg.getName()), false);
            if (fromLsnr) {
                // Each listener notification triggers KEYS_FROM_CALLBACK extra updates,
                // each of which is observed by all NODES listeners.
                final int expCnt = qryCntr.get() * NODES * KEYS_FROM_CALLBACK;
                boolean res = GridTestUtils.waitForCondition(new PA() {
                    @Override public boolean apply() {
                        return cbCntr.get() >= expCnt;
                    }
                }, TimeUnit.SECONDS.toMillis(60));
                assertTrue("Failed to wait events [exp=" + expCnt + ", act=" + cbCntr.get() + "]", res);
                assertEquals(expCnt, cbCntr.get());
                for (Set<T2<QueryTestKey, QueryTestValue>> set : evtsFromCallbacks)
                    checkEvents(set, qryCntr.get() * KEYS_FROM_CALLBACK, grid(0).cache(ccfg.getName()), true);
            }
            else {
                // The remote filter runs on the primary and every backup for each update.
                final int expInvkCnt = ITERATION_CNT * threadCnt *
                    (ccfg.getCacheMode() != REPLICATED ? (ccfg.getBackups() + 1) : NODES - 1) * NODES;
                GridTestUtils.waitForCondition(new PA() {
                    @Override public boolean apply() {
                        return filterCbCntr.get() >= expInvkCnt;
                    }
                }, TimeUnit.SECONDS.toMillis(60));
                assertEquals(expInvkCnt, filterCbCntr.get());
                for (Set<T2<QueryTestKey, QueryTestValue>> set : evtsFromCallbacks)
                    checkEvents(set, expInvkCnt * KEYS_FROM_CALLBACK, grid(0).cache(ccfg.getName()), true);
            }
        }
        finally {
            for (QueryCursor<?> qry : qries)
                qry.close();
            ignite(0).destroyCache(ccfg.getName());
        }
    }
    /**
     * Waits until {@code set} holds at least {@code expCnt} events, then checks that exactly
     * one event was received for every (key, value) pair from 0 up to each key's current value.
     *
     * @param set Received events.
     * @param expCnt Expected count.
     * @param cache Cache.
     * @param cb {@code true} to check callback-generated keys, {@code false} for direct keys.
     * @throws Exception If failed.
     */
    private void checkEvents(final Set<T2<QueryTestKey, QueryTestValue>> set, final int expCnt, IgniteCache cache,
        boolean cb) throws Exception {
        assertTrue("Expected size: " + expCnt + ", actual: " + set.size(), GridTestUtils.waitForCondition(new PA() {
            @Override public boolean apply() {
                return set.size() >= expCnt;
            }
        }, 10000L));
        int startKey = cb ? KEYS : 0;
        int endKey = cb ? KEYS + KEYS_FROM_CALLBACK : KEYS;
        for (int i = startKey; i < endKey; i++) {
            QueryTestKey key = new QueryTestKey(i);
            QueryTestValue maxVal = (QueryTestValue)cache.get(key);
            // One event must exist per increment, from 0 up to the current value.
            for (int val = 0; val <= maxVal.val1; val++)
                assertTrue(set.remove(new T2<>(key, new QueryTestValue(val))));
        }
        assertTrue(set.isEmpty());
    }
    /**
     * Entry processor that increments the entry value, creating it with 0 if absent.
     */
    private static class IncrementTestEntryProcessor implements
        CacheEntryProcessor<QueryTestKey, QueryTestValue, Object> {
        /** {@inheritDoc} */
        @Override public Object process(MutableEntry<QueryTestKey, QueryTestValue> entry, Object... arguments)
            throws EntryProcessorException {
            if (entry.exists())
                entry.setValue(new QueryTestValue(entry.getValue().val1 + 1));
            else
                entry.setValue(new QueryTestValue(0));
            return null;
        }
    }
    /**
     * Async remote filter that, on every event for a direct key, updates the callback
     * key range [KEYS, KEYS + KEYS_FROM_CALLBACK) from inside the callback itself.
     */
    @IgniteAsyncCallback
    private static class CacheTestRemoteFilterAsync implements
        CacheEntryEventSerializableFilter<QueryTestKey, QueryTestValue> {
        /** Local Ignite instance, injected on the node executing the filter. */
        @IgniteInstanceResource
        private Ignite ignite;
        /** Name of the cache to update from the callback. */
        private String cacheName;
        /**
         * @param cacheName Cache name.
         */
        public CacheTestRemoteFilterAsync(String cacheName) {
            this.cacheName = cacheName;
        }
        /** {@inheritDoc} */
        @Override public boolean evaluate(CacheEntryEvent<? extends QueryTestKey, ? extends QueryTestValue> e)
            throws CacheEntryListenerException {
            if (e.getKey().compareTo(new QueryTestKey(KEYS)) < 0) {
                IgniteCache<QueryTestKey, QueryTestValue> cache = ignite.cache(cacheName);
                // Randomly exercise both the batch invokeAll() and per-key invoke() paths.
                if (ThreadLocalRandom.current().nextBoolean()) {
                    Set<QueryTestKey> keys = new LinkedHashSet<>();
                    for (int key = KEYS; key < KEYS + KEYS_FROM_CALLBACK; key++)
                        keys.add(new QueryTestKey(key));
                    cache.invokeAll(keys, new IncrementTestEntryProcessor());
                }
                else {
                    for (int key = KEYS; key < KEYS + KEYS_FROM_CALLBACK; key++)
                        cache.invoke(new QueryTestKey(key), new IncrementTestEntryProcessor());
                }
                filterCbCntr.incrementAndGet();
            }
            // Always pass events through to the local listener.
            return true;
        }
    }
    /**
     * Async local listener that records events and, when a cache is supplied, updates the
     * callback key range from inside the callback.
     */
    @IgniteAsyncCallback
    private static class TestCacheAsyncEventListener
        implements CacheEntryUpdatedListener<QueryTestKey, QueryTestValue> {
        /** Received events for direct keys. */
        private final Set<T2<QueryTestKey, QueryTestValue>> rcvsEvts;
        /** Counter of events for direct keys. */
        private final AtomicInteger cntr;
        /** Counter of events for callback-generated keys. */
        private final AtomicInteger cbCntr;
        /** Received events for callback-generated keys. */
        private final Set<T2<QueryTestKey, QueryTestValue>> evtsFromCb;
        /** Cache to update from the callback, or {@code null} to disable callback updates. */
        private IgniteCache<QueryTestKey, QueryTestValue> cache;
        /**
         * @param rcvsEvts Set for received events.
         * @param evtsFromCb Set for received events.
         * @param cache Ignite cache.
         * @param cntr Received events counter.
         * @param cbCntr Received events counter from callbacks.
         */
        public TestCacheAsyncEventListener(Set<T2<QueryTestKey, QueryTestValue>> rcvsEvts,
            Set<T2<QueryTestKey, QueryTestValue>> evtsFromCb,
            @Nullable IgniteCache cache,
            AtomicInteger cntr,
            AtomicInteger cbCntr) {
            this.rcvsEvts = rcvsEvts;
            this.evtsFromCb = evtsFromCb;
            this.cache = cache;
            this.cntr = cntr;
            this.cbCntr = cbCntr;
        }
        /** {@inheritDoc} */
        @Override public void onUpdated(Iterable<CacheEntryEvent<? extends QueryTestKey, ? extends QueryTestValue>> evts)
            throws CacheEntryListenerException {
            for (CacheEntryEvent<? extends QueryTestKey, ? extends QueryTestValue> e : evts) {
                if (e.getKey().compareTo(new QueryTestKey(KEYS)) < 0) {
                    rcvsEvts.add(new T2<>(e.getKey(), e.getValue()));
                    cntr.incrementAndGet();
                    if (cache != null) {
                        // Randomly exercise both invokeAll() and per-key invoke() paths.
                        if (ThreadLocalRandom.current().nextBoolean()) {
                            Set<QueryTestKey> keys = new LinkedHashSet<>();
                            for (int key = KEYS; key < KEYS + KEYS_FROM_CALLBACK; key++)
                                keys.add(new QueryTestKey(key));
                            cache.invokeAll(keys, new IncrementTestEntryProcessor());
                        }
                        else {
                            for (int key = KEYS; key < KEYS + KEYS_FROM_CALLBACK; key++)
                                cache.invoke(new QueryTestKey(key), new IncrementTestEntryProcessor());
                        }
                    }
                }
                else {
                    evtsFromCb.add(new T2<>(e.getKey(), e.getValue()));
                    cbCntr.incrementAndGet();
                }
            }
        }
    }
    /**
     * @param cacheMode Cache mode.
     * @param backups Number of backups (applied only for {@code PARTITIONED} caches).
     * @param atomicityMode Cache atomicity mode.
     * @param writeMode Write sync mode.
     * @return Cache configuration.
     */
    protected CacheConfiguration<Object, Object> cacheConfiguration(
        CacheMode cacheMode,
        int backups,
        CacheAtomicityMode atomicityMode,
        CacheWriteSynchronizationMode writeMode) {
        CacheConfiguration<Object, Object> ccfg = new CacheConfiguration<>(DEFAULT_CACHE_NAME);
        ccfg.setName("test-cache-" + atomicityMode + "-" + cacheMode + "-" + writeMode + "-" + backups);
        ccfg.setAtomicityMode(atomicityMode);
        ccfg.setCacheMode(cacheMode);
        ccfg.setWriteSynchronizationMode(writeMode);
        if (cacheMode == PARTITIONED)
            ccfg.setBackups(backups);
        return ccfg;
    }
    /**
     * Test cache key.
     */
    public static class QueryTestKey implements Serializable, Comparable {
        /** Key value. */
        private final Integer key;
        /**
         * @param key Key.
         */
        public QueryTestKey(Integer key) {
            this.key = key;
        }
        /** {@inheritDoc} */
        @Override public boolean equals(Object o) {
            if (this == o)
                return true;
            if (o == null || getClass() != o.getClass())
                return false;
            QueryTestKey that = (QueryTestKey)o;
            return key.equals(that.key);
        }
        /** {@inheritDoc} */
        @Override public int hashCode() {
            return key.hashCode();
        }
        /** {@inheritDoc} */
        @Override public String toString() {
            return S.toString(QueryTestKey.class, this);
        }
        /** {@inheritDoc} */
        @Override public int compareTo(Object o) {
            // NOTE(review): subtraction compare is only safe while keys stay small
            // non-negative ints (as in this test); Integer.compare would be safer.
            return key - ((QueryTestKey)o).key;
        }
    }
    /**
     * Test cache value: an integer plus its string form.
     */
    public static class QueryTestValue implements Serializable {
        /** Integer value. */
        @GridToStringInclude
        protected final Integer val1;
        /** String form of val1. */
        @GridToStringInclude
        protected final String val2;
        /**
         * @param val Value.
         */
        public QueryTestValue(Integer val) {
            this.val1 = val;
            this.val2 = String.valueOf(val);
        }
        /** {@inheritDoc} */
        @Override public boolean equals(Object o) {
            if (this == o)
                return true;
            if (o == null || getClass() != o.getClass())
                return false;
            QueryTestValue that = (QueryTestValue) o;
            return val1.equals(that.val1) && val2.equals(that.val2);
        }
        /** {@inheritDoc} */
        @Override public int hashCode() {
            int res = val1.hashCode();
            res = 31 * res + val2.hashCode();
            return res;
        }
        /** {@inheritDoc} */
        @Override public String toString() {
            return S.toString(QueryTestValue.class, this);
        }
    }
}
| |
package com.yheriatovych.reductor.processor;
import com.google.testing.compile.JavaFileObjects;
import com.yheriatovych.reductor.processor.ReductorAnnotationProcessor;
import org.junit.Test;
import javax.tools.JavaFileObject;
import static com.google.common.truth.Truth.assertAbout;
import static com.google.testing.compile.JavaSourceSubjectFactory.javaSource;
public class AutoReducerValidationTest {
@Test
public void testFailIfReducerIsInterace() { // (sic) name kept for compatibility
    // @AutoReducer applied to an interface must be rejected by the processor.
    String src = "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
        "package test;" +
        "\n" +
        "import com.yheriatovych.reductor.Reducer;\n" +
        "import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
        "\n" +
        "@AutoReducer\n" +
        "public interface FoobarReducer extends Reducer<String> {\n" +
        "}";
    JavaFileObject input = JavaFileObjects.forSourceString("test.FoobarReducer", src);
    assertAbout(javaSource()).that(input)
        .withCompilerOptions("-Xlint:-processing")
        .processedWith(new ReductorAnnotationProcessor())
        .failsToCompile()
        .withErrorContaining("You can apply AutoReducer only to classes")
        .in(input).onLine(7);
}
@Test
public void testFailIfReducerIsInnerClass() {
    // A non-static (inner) reducer class must be reported as an error.
    String src = "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
        "package test;" +
        "\n" +
        "import com.yheriatovych.reductor.Reducer;\n" +
        "import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
        "\n" +
        "public class Test {\n" +
        " @AutoReducer\n" +
        " public abstract class FoobarReducer implements Reducer<String> {\n" +
        " }\n" +
        "}";
    JavaFileObject input = JavaFileObjects.forSourceString("test.Test", src);
    assertAbout(javaSource()).that(input)
        .withCompilerOptions("-Xlint:-processing")
        .processedWith(new ReductorAnnotationProcessor())
        .failsToCompile()
        .withErrorContaining("AutoReducer annotated reducers should not be inner classes. Probably 'static' modifier missing")
        .in(input).onLine(8);
}
@Test
public void testCompilesIfReducerIsNestedClass() {
    // A static nested reducer class is legal and must compile cleanly.
    String src = "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
        "package test;" +
        "\n" +
        "import com.yheriatovych.reductor.Reducer;\n" +
        "import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
        "\n" +
        "public class Test {\n" +
        " @AutoReducer\n" +
        " public static abstract class FoobarReducer implements Reducer<String> {\n" +
        " }\n" +
        "}";
    JavaFileObject input = JavaFileObjects.forSourceString("test.Test", src);
    assertAbout(javaSource()).that(input)
        .withCompilerOptions("-Xlint:-processing")
        .processedWith(new ReductorAnnotationProcessor())
        .compilesWithoutWarnings();
}
@Test
public void testCompilesIfParentImplementsReducer() {
    // Implementing Reducer indirectly through a superclass is sufficient.
    String src = "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
        "package test;" +
        "\n" +
        "import com.yheriatovych.reductor.Reducer;\n" +
        "import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
        "\n" +
        "abstract class SuperFoobarReducer implements Reducer<String> {\n" +
        "}\n" +
        "@AutoReducer\n" +
        "public abstract class FoobarReducer extends SuperFoobarReducer {\n" +
        "}";
    JavaFileObject input = JavaFileObjects.forSourceString("test.FoobarReducer", src);
    assertAbout(javaSource()).that(input)
        .withCompilerOptions("-Xlint:-processing")
        .processedWith(new ReductorAnnotationProcessor())
        .compilesWithoutWarnings();
}
@Test
public void testFailIfReducerAndItsParentDoNotImplementReducer() {
    // Neither the class nor its superclass implements Reducer: must fail.
    String src = "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
        "package test;" +
        "\n" +
        "import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
        "\n" +
        "abstract class SuperFoobarReducer {\n" +
        "}\n" +
        "@AutoReducer\n" +
        "public abstract class FoobarReducer extends SuperFoobarReducer {\n" +
        "}";
    JavaFileObject input = JavaFileObjects.forSourceString("test.FoobarReducer", src);
    assertAbout(javaSource()).that(input)
        .withCompilerOptions("-Xlint:-processing")
        .processedWith(new ReductorAnnotationProcessor())
        .failsToCompile()
        .withErrorContaining("test.FoobarReducer should implement Reducer interface")
        .in(input).onLine(8);
}
@Test
public void testFailIfReducerDoNotImplementReducer() {
    // An annotated class with no Reducer supertype at all must fail.
    String src = "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
        "package test;" +
        "\n" +
        "import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
        "\n" +
        "@AutoReducer\n" +
        "public abstract class FoobarReducer {\n" +
        "}";
    JavaFileObject input = JavaFileObjects.forSourceString("test.FoobarReducer", src);
    assertAbout(javaSource()).that(input)
        .withCompilerOptions("-Xlint:-processing")
        .processedWith(new ReductorAnnotationProcessor())
        .failsToCompile()
        .withErrorContaining("test.FoobarReducer should implement Reducer interface")
        .in(input).onLine(6);
}
@Test
public void testFailIfReturnTypeIsNotTheSameAsReducerTypeParameter() {
    // A handler returning a type not assignable to the state type must fail.
    String src = "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
        "package test;" +
        "\n" +
        "import com.yheriatovych.reductor.Reducer;\n" +
        "import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
        "\n" +
        "@AutoReducer\n" +
        "public abstract class FoobarReducer implements Reducer<String>{\n" +
        " @AutoReducer.Action(\"ACTION_1\")\n" +
        " int handleAction(String state, int number) {\n" +
        " return number;\n" +
        " }\n" +
        "}";
    JavaFileObject input = JavaFileObjects.forSourceString("test.FoobarReducer", src);
    assertAbout(javaSource()).that(input)
        .withCompilerOptions("-Xlint:-processing")
        .processedWith(new ReductorAnnotationProcessor())
        .failsToCompile()
        .withErrorContaining("Method handleAction(java.lang.String,int) should return type assignable to state type java.lang.String")
        .in(input).onLine(9);
}
@Test
public void testFailIfHandlerHasNoArguments() {
    // A handler must take at least the state argument.
    String src = "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
        "package test;" +
        "\n" +
        "import com.yheriatovych.reductor.Reducer;\n" +
        "import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
        "\n" +
        "@AutoReducer\n" +
        "public abstract class FoobarReducer implements Reducer<String>{\n" +
        " @AutoReducer.Action(\"ACTION_1\")\n" +
        " String handleAction() {\n" +
        " return \"\";\n" +
        " }\n" +
        "}";
    JavaFileObject input = JavaFileObjects.forSourceString("test.FoobarReducer", src);
    assertAbout(javaSource()).that(input)
        .withCompilerOptions("-Xlint:-processing")
        .processedWith(new ReductorAnnotationProcessor())
        .failsToCompile()
        .withErrorContaining("Method handleAction() should have at least 1 arguments: state of type java.lang.String")
        .in(input).onLine(9);
}
@Test
public void testFailIfFirstArgumentIsNotStateType() {
    // The first handler parameter must have the state type.
    String src = "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
        "package test;" +
        "\n" +
        "import com.yheriatovych.reductor.Reducer;\n" +
        "import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
        "\n" +
        "@AutoReducer\n" +
        "public abstract class FoobarReducer implements Reducer<String>{\n" +
        " @AutoReducer.Action(\"ACTION_1\")\n" +
        " String handleAction(int action) {\n" +
        " return \"\";\n" +
        " }\n" +
        "}";
    JavaFileObject input = JavaFileObjects.forSourceString("test.FoobarReducer", src);
    assertAbout(javaSource()).that(input)
        .withCompilerOptions("-Xlint:-processing")
        .processedWith(new ReductorAnnotationProcessor())
        .failsToCompile()
        .withErrorContaining("First parameter action of method handleAction(int) should have the same type as state (java.lang.String)")
        .in(input).onLine(9);
}
@Test
public void testGeneratedReducerWithMatchingConstructor() {
    // NOTE(review): despite the name, this verifies the FAILURE case — the only
    // constructor is private, so no accessible constructor is available.
    String src = "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
        "package test;" +
        "\n" +
        "import com.yheriatovych.reductor.Reducer;\n" +
        "import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
        "\n" +
        "@AutoReducer\n" +
        "public abstract class FoobarReducer implements Reducer<String>{\n" +
        " private FoobarReducer(int foo, String bar) {\n" +
        " \n" +
        " }\n" +
        "}";
    JavaFileObject input = JavaFileObjects.forSourceString("test.FoobarReducer", src);
    assertAbout(javaSource()).that(input)
        .withCompilerOptions("-Xlint:-processing")
        .processedWith(new ReductorAnnotationProcessor())
        .failsToCompile()
        .withErrorContaining("No accessible constructors available for class test.FoobarReducer")
        .in(input)
        .onLine(7);
}
@Test
public void testFailIfHandlerIsPrivate() {
    // Private handlers cannot be called by the generated subclass: must fail.
    String src = "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
        "package test;" +
        "\n" +
        "import com.yheriatovych.reductor.Reducer;\n" +
        "import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
        "\n" +
        "@AutoReducer\n" +
        "public abstract class FoobarReducer implements Reducer<String>{\n" +
        " @AutoReducer.Action(\"ACTION_1\")\n" +
        " private String handleAction(String state) {\n" +
        " return \"\";\n" +
        " }\n" +
        "}";
    JavaFileObject input = JavaFileObjects.forSourceString("test.FoobarReducer", src);
    assertAbout(javaSource()).that(input)
        .withCompilerOptions("-Xlint:-processing")
        .processedWith(new ReductorAnnotationProcessor())
        .failsToCompile()
        .withErrorContaining("handleAction(java.lang.String) has 'private' modifier and is not accessible from child classes")
        .in(input).onLine(9);
}
@Test
public void testUnboxReturnType() {
    // A primitive int return is accepted for a boxed Integer state type.
    String src = "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
        "package test;" +
        "\n" +
        "import com.yheriatovych.reductor.Reducer;\n" +
        "import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
        "\n" +
        "@AutoReducer\n" +
        "public abstract class FoobarReducer implements Reducer<Integer>{\n" +
        " @AutoReducer.Action(\"ACTION_1\")\n" +
        " int handleAction(Integer state) {\n" +
        " return 0;\n" +
        " }\n" +
        "}";
    JavaFileObject input = JavaFileObjects.forSourceString("test.FoobarReducer", src);
    assertAbout(javaSource()).that(input)
        .withCompilerOptions("-Xlint:-processing")
        .processedWith(new ReductorAnnotationProcessor())
        .compilesWithoutError();
}
@Test
public void testUnboxArgType() {
    // A primitive int state parameter is accepted for a boxed Integer state type.
    String src = "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
        "package test;" +
        "\n" +
        "import com.yheriatovych.reductor.Reducer;\n" +
        "import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
        "\n" +
        "@AutoReducer\n" +
        "public abstract class FoobarReducer implements Reducer<Integer>{\n" +
        " @AutoReducer.Action(\"ACTION_1\")\n" +
        " Integer handleAction(int state) {\n" +
        " return 0;\n" +
        " }\n" +
        "}";
    JavaFileObject input = JavaFileObjects.forSourceString("test.FoobarReducer", src);
    assertAbout(javaSource()).that(input)
        .withCompilerOptions("-Xlint:-processing")
        .processedWith(new ReductorAnnotationProcessor())
        .compilesWithoutError();
}
@Test
public void testFailIfAnnotatedBothWithInitialStateAndAction() {
JavaFileObject source = JavaFileObjects.forSourceString("test.FoobarReducer", "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
"package test;" +
"\n" +
"import com.yheriatovych.reductor.Reducer;\n" +
"import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
"\n" +
"@AutoReducer\n" +
"public abstract class FoobarReducer implements Reducer<Integer> {\n" +
" @AutoReducer.InitialState\n" +
" @AutoReducer.Action(\"ACTION_1\")\n" +
" int init() {\n" +
" return 42;\n" +
" }\n" +
"}");
assertAbout(javaSource()).that(source)
.withCompilerOptions("-Xlint:-processing")
.processedWith(new ReductorAnnotationProcessor())
.failsToCompile()
.withErrorContaining("Method init() should be may be annotated " +
"with either @AutoReducer.InitialState or @AutoReducer.Action but not both")
.in(source)
.onLine(10);
}
@Test
public void testFailIfTwoInitMethodsExists() {
JavaFileObject source = JavaFileObjects.forSourceString("test.FoobarReducer", "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
"package test;" +
"\n" +
"import com.yheriatovych.reductor.Reducer;\n" +
"import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
"\n" +
"@AutoReducer\n" +
"public abstract class FoobarReducer implements Reducer<Integer> {\n" +
" @AutoReducer.InitialState\n" +
" int init1() {\n" +
" return 42;\n" +
" }\n" +
"\n" +
" @AutoReducer.InitialState\n" +
" int init2() {\n" +
" return 42;\n" +
" }\n" +
"}");
assertAbout(javaSource()).that(source)
.withCompilerOptions("-Xlint:-processing")
.processedWith(new ReductorAnnotationProcessor())
.failsToCompile()
.withErrorContaining("Methods init1() and init2() are both annotated with @AutoReducer.InitialState." +
" Only one @AutoReducer.InitialState method is allowed")
.in(source)
.onLine(14);
}
@Test
public void testFailIfInitMethodIsPrivate() {
JavaFileObject source = JavaFileObjects.forSourceString("test.FoobarReducer", "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
"package test;" +
"\n" +
"import com.yheriatovych.reductor.Reducer;\n" +
"import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
"\n" +
"@AutoReducer\n" +
"public abstract class FoobarReducer implements Reducer<Integer> {\n" +
" @AutoReducer.InitialState\n" +
" private int init() {\n" +
" return 42;\n" +
" }\n" +
"}");
assertAbout(javaSource()).that(source)
.withCompilerOptions("-Xlint:-processing")
.processedWith(new ReductorAnnotationProcessor())
.failsToCompile()
.withErrorContaining("init() has 'private' modifier and is not accessible from child classes")
.in(source)
.onLine(9);
}
@Test
public void testFailIfInitMethodReturnsNotStateType() {
JavaFileObject source = JavaFileObjects.forSourceString("test.FoobarReducer", "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
"package test;" +
"\n" +
"import com.yheriatovych.reductor.Reducer;\n" +
"import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
"\n" +
"@AutoReducer\n" +
"public abstract class FoobarReducer implements Reducer<Integer> {\n" +
" @AutoReducer.InitialState\n" +
" String init() {\n" +
" return \"\";\n" +
" }\n" +
"}");
assertAbout(javaSource()).that(source)
.withCompilerOptions("-Xlint:-processing")
.processedWith(new ReductorAnnotationProcessor())
.failsToCompile()
.withErrorContaining("Method init() should return type assignable to state type java.lang.Integer")
.in(source)
.onLine(9);
}
@Test
public void testFailIfInitMethodHasParameters() {
JavaFileObject source = JavaFileObjects.forSourceString("test.FoobarReducer", "// Generated by com.yheriatovych.reductor.processor.ReductorAnnotationProcessor (https://github.com/Yarikx/reductor)\n" +
"package test;" +
"\n" +
"import com.yheriatovych.reductor.Reducer;\n" +
"import com.yheriatovych.reductor.annotations.AutoReducer;\n" +
"\n" +
"@AutoReducer\n" +
"public abstract class FoobarReducer implements Reducer<Integer> {\n" +
" @AutoReducer.InitialState\n" +
" int init(int foobar) {\n" +
" return 42;\n" +
" }\n" +
"}");
assertAbout(javaSource()).that(source)
.withCompilerOptions("-Xlint:-processing")
.processedWith(new ReductorAnnotationProcessor())
.failsToCompile()
.withErrorContaining("Method init(int) should not have any parameters")
.in(source)
.onLine(9);
}
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.uiDesigner.make;
import com.intellij.lang.java.JavaParserDefinition;
import com.intellij.lexer.Lexer;
import com.intellij.openapi.application.ApplicationNamesInfo;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.patterns.ElementPattern;
import com.intellij.patterns.PlatformPatterns;
import com.intellij.patterns.PsiJavaPatterns;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.impl.source.tree.JavaDocElementType;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.uiDesigner.*;
import com.intellij.uiDesigner.compiler.*;
import com.intellij.uiDesigner.core.SupportCode;
import com.intellij.uiDesigner.lw.*;
import com.intellij.uiDesigner.shared.BorderType;
import com.intellij.util.IncorrectOperationException;
import gnu.trove.TIntObjectHashMap;
import gnu.trove.TObjectIntHashMap;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.util.*;
public final class FormSourceCodeGenerator {
private static final Logger LOG = Logger.getInstance("com.intellij.uiDesigner.make.FormSourceCodeGenerator");
@NonNls private StringBuffer myBuffer;
private Stack<Boolean> myIsFirstParameterStack;
private final Project myProject;
private final ArrayList<FormErrorInfo> myErrors;
private boolean myNeedLoadLabelText;
private boolean myNeedLoadButtonText;
private static final Map<Class, LayoutSourceGenerator> ourComponentLayoutCodeGenerators = new HashMap<Class, LayoutSourceGenerator>();
private static final Map<String, LayoutSourceGenerator> ourContainerLayoutCodeGenerators = new HashMap<String, LayoutSourceGenerator>();
@NonNls private static final TIntObjectHashMap<String> ourFontStyleMap = new TIntObjectHashMap<String>();
@NonNls private static final TIntObjectHashMap<String> ourTitleJustificationMap = new TIntObjectHashMap<String>();
@NonNls private static final TIntObjectHashMap<String> ourTitlePositionMap = new TIntObjectHashMap<String>();
private static final ElementPattern ourSuperCallPattern = PsiJavaPatterns.psiExpressionStatement().withFirstChild(PlatformPatterns.psiElement(PsiMethodCallExpression.class).withFirstChild(
PlatformPatterns.psiElement().withText(PsiKeyword.SUPER)));
  // One-time population of the static lookup tables used during code generation:
  //  - component class -> layout generator for components with built-in layout
  //    semantics (split pane, tabbed pane, scroll pane, tool bar)
  //  - AWT Font style bit mask -> source text of the matching java.awt.Font constant
  //  - title position / justification codes -> source text of the TitledBorder constant
  static {
    ourComponentLayoutCodeGenerators.put(LwSplitPane.class, new SplitPaneLayoutSourceGenerator());
    ourComponentLayoutCodeGenerators.put(LwTabbedPane.class, new TabbedPaneLayoutSourceGenerator());
    ourComponentLayoutCodeGenerators.put(LwScrollPane.class, new ScrollPaneLayoutSourceGenerator());
    ourComponentLayoutCodeGenerators.put(LwToolBar.class, new ToolBarLayoutSourceGenerator());
    ourFontStyleMap.put(Font.PLAIN, "java.awt.Font.PLAIN");
    ourFontStyleMap.put(Font.BOLD, "java.awt.Font.BOLD");
    ourFontStyleMap.put(Font.ITALIC, "java.awt.Font.ITALIC");
    // combined style has no single Font constant, so emit the OR expression
    ourFontStyleMap.put(Font.BOLD | Font.ITALIC, "java.awt.Font.BOLD | java.awt.Font.ITALIC");
    ourTitlePositionMap.put(0, "javax.swing.border.TitledBorder.DEFAULT_POSITION");
    ourTitlePositionMap.put(1, "javax.swing.border.TitledBorder.ABOVE_TOP");
    ourTitlePositionMap.put(2, "javax.swing.border.TitledBorder.TOP");
    ourTitlePositionMap.put(3, "javax.swing.border.TitledBorder.BELOW_TOP");
    ourTitlePositionMap.put(4, "javax.swing.border.TitledBorder.ABOVE_BOTTOM");
    ourTitlePositionMap.put(5, "javax.swing.border.TitledBorder.BOTTOM");
    ourTitlePositionMap.put(6, "javax.swing.border.TitledBorder.BELOW_BOTTOM");
    ourTitleJustificationMap.put(0, "javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION");
    ourTitleJustificationMap.put(1, "javax.swing.border.TitledBorder.LEFT");
    ourTitleJustificationMap.put(2, "javax.swing.border.TitledBorder.CENTER");
    ourTitleJustificationMap.put(3, "javax.swing.border.TitledBorder.RIGHT");
    ourTitleJustificationMap.put(4, "javax.swing.border.TitledBorder.LEADING");
    ourTitleJustificationMap.put(5, "javax.swing.border.TitledBorder.TRAILING");
  }
public FormSourceCodeGenerator(@NotNull final Project project){
myProject = project;
myErrors = new ArrayList<FormErrorInfo>();
}
public void generate(final VirtualFile formFile) {
myNeedLoadLabelText = false;
myNeedLoadButtonText = false;
final Module module = ModuleUtil.findModuleForFile(formFile, myProject);
if (module == null) {
return;
}
// ensure that new instances of generators are used for every run
ourContainerLayoutCodeGenerators.clear();
ourContainerLayoutCodeGenerators.put(UIFormXmlConstants.LAYOUT_INTELLIJ, new GridLayoutSourceGenerator());
ourContainerLayoutCodeGenerators.put(UIFormXmlConstants.LAYOUT_GRIDBAG, new GridBagLayoutSourceGenerator());
ourContainerLayoutCodeGenerators.put(UIFormXmlConstants.LAYOUT_BORDER, new BorderLayoutSourceGenerator());
ourContainerLayoutCodeGenerators.put(UIFormXmlConstants.LAYOUT_FLOW, new FlowLayoutSourceGenerator());
ourContainerLayoutCodeGenerators.put(UIFormXmlConstants.LAYOUT_CARD, new CardLayoutSourceGenerator());
ourContainerLayoutCodeGenerators.put(UIFormXmlConstants.LAYOUT_FORM, new FormLayoutSourceGenerator());
myErrors.clear();
final PsiPropertiesProvider propertiesProvider = new PsiPropertiesProvider(module);
final Document doc = FileDocumentManager.getInstance().getDocument(formFile);
final LwRootContainer rootContainer;
try {
rootContainer = Utils.getRootContainer(doc.getText(), propertiesProvider);
}
catch (AlienFormFileException ignored) {
// ignoring this file
return;
}
catch (Exception e) {
myErrors.add(new FormErrorInfo(null, UIDesignerBundle.message("error.cannot.process.form.file", e)));
return;
}
if (rootContainer.getClassToBind() == null) {
// form skipped - no class to bind
return;
}
ErrorAnalyzer.analyzeErrors(module, formFile, null, rootContainer, null);
FormEditingUtil.iterate(
rootContainer,
new FormEditingUtil.ComponentVisitor<LwComponent>() {
public boolean visit(final LwComponent iComponent) {
final ErrorInfo errorInfo = ErrorAnalyzer.getErrorForComponent(iComponent);
if (errorInfo != null) {
String message;
if (iComponent.getBinding() != null) {
message = UIDesignerBundle.message("error.for.component", iComponent.getBinding(), errorInfo.myDescription);
}
else {
message = errorInfo.myDescription;
}
myErrors.add(new FormErrorInfo(iComponent.getId(), message));
}
return true;
}
}
);
if (myErrors.size() != 0) {
return;
}
try {
_generate(rootContainer, module);
}
catch (ClassToBindNotFoundException e) {
// ignore
}
catch (CodeGenerationException e) {
myErrors.add(new FormErrorInfo(e.getComponentId(), e.getMessage()));
}
catch (IncorrectOperationException e) {
myErrors.add(new FormErrorInfo(null, e.getMessage()));
}
}
  /**
   * Errors collected by the most recent {@link #generate} call.
   * NOTE(review): this returns the live internal list (cleared on every run),
   * not a defensive copy — callers must not mutate it.
   */
  public ArrayList<FormErrorInfo> getErrors() {
    return myErrors;
  }
  /**
   * Performs the actual regeneration: renders the body of the generated
   * $$$setupUI$$$() method for the form's single top-level component, splices
   * the generated members into a copy of the bound class, and replaces the
   * original class only if the result differs lexically (see lexemsEqual).
   *
   * @throws CodeGenerationException    on structural form problems (no/multiple
   *                                    top-level components, non-empty XY panels,
   *                                    missing createUIComponents() method)
   * @throws IncorrectOperationException if a PSI modification fails
   */
  private void _generate(final LwRootContainer rootContainer, final Module module) throws CodeGenerationException, IncorrectOperationException{
    myBuffer = new StringBuffer();
    myIsFirstParameterStack = new Stack<Boolean>();
    final HashMap<LwComponent,String> component2variable = new HashMap<LwComponent,String>();
    final TObjectIntHashMap<String> class2variableIndex = new TObjectIntHashMap<String>();
    final HashMap<String,LwComponent> id2component = new HashMap<String, LwComponent>();
    if (rootContainer.getComponentCount() != 1) {
      throw new CodeGenerationException(null, UIDesignerBundle.message("error.one.toplevel.component.required"));
    }
    final LwComponent topComponent = (LwComponent)rootContainer.getComponent(0);
    String id = Utils.findNotEmptyPanelWithXYLayout(topComponent);
    if (id != null) {
      throw new CodeGenerationException(id, UIDesignerBundle.message("error.nonempty.xy.panels.found"));
    }
    final PsiClass classToBind = FormEditingUtil.findClassToBind(module, rootContainer.getClassToBind());
    if (classToBind == null) {
      throw new ClassToBindNotFoundException(UIDesignerBundle.message("error.class.to.bind.not.found", rootContainer.getClassToBind()));
    }
    final boolean haveCustomCreateComponents = Utils.getCustomCreateComponentCount(rootContainer) > 0;
    if (haveCustomCreateComponents) {
      // custom-created components require a user-written createUIComponents() method,
      // which the generated setup method must call first
      if (FormEditingUtil.findCreateComponentsMethod(classToBind) == null) {
        throw new CodeGenerationException(null, UIDesignerBundle.message("error.no.custom.create.method"));
      }
      myBuffer.append(AsmCodeGenerator.CREATE_COMPONENTS_METHOD_NAME).append("();");
    }
    // first pass: creation + layout + simple properties; second pass: component references; then button groups
    generateSetupCodeForComponent(topComponent,
                                  component2variable,
                                  class2variableIndex,
                                  id2component, module, classToBind);
    generateComponentReferenceProperties(topComponent, component2variable, class2variableIndex, id2component, classToBind);
    generateButtonGroups(rootContainer, component2variable, class2variableIndex, id2component, classToBind);
    final String methodText = myBuffer.toString();
    final PsiManager psiManager = PsiManager.getInstance(module.getProject());
    final PsiElementFactory elementFactory = JavaPsiFacade.getInstance(psiManager.getProject()).getElementFactory();
    // work on a copy so the original is only replaced if something actually changed
    PsiClass newClass = (PsiClass) classToBind.copy();
    cleanup(newClass);
    // [anton] the comments are written according to the SCR 26896
    final PsiClass fakeClass = elementFactory.createClassFromText(
      "{\n" +
      "// GUI initializer generated by " + ApplicationNamesInfo.getInstance().getFullProductName() + " GUI Designer\n" +
      "// >>> IMPORTANT!! <<<\n" +
      "// DO NOT EDIT OR ADD ANY CODE HERE!\n" +
      "" + AsmCodeGenerator.SETUP_METHOD_NAME + "();\n" +
      "}\n" +
      "\n" +
      "/** Method generated by " + ApplicationNamesInfo.getInstance().getFullProductName() + " GUI Designer\n" +
      " * >>> IMPORTANT!! <<<\n" +
      " * DO NOT edit this method OR call it in your code!\n" +
      " * @noinspection ALL\n" +
      " */\n" +
      "private void " + AsmCodeGenerator.SETUP_METHOD_NAME + "()\n" +
      "{\n" +
      methodText +
      "}\n",
      null
    );
    final CodeStyleManager formatter = CodeStyleManager.getInstance(module.getProject());
    final JavaCodeStyleManager styler = JavaCodeStyleManager.getInstance(module.getProject());
    PsiMethod method = (PsiMethod) newClass.add(fakeClass.getMethods()[0]);
    // don't generate initializer block if $$$setupUI$$$() is called explicitly from one of the constructors
    boolean needInitializer = true;
    boolean needSetupUI = false;
    for(PsiMethod constructor: newClass.getConstructors()) {
      if (containsMethodIdentifier(constructor, method)) {
        needInitializer = false;
      }
      else if (haveCustomCreateComponents && hasCustomComponentAffectingReferences(constructor, newClass, rootContainer, null)) {
        // constructor touches bound fields/foreign methods: inject the setup call
        // into the constructor instead of relying on an initializer block
        needInitializer = false;
        needSetupUI = true;
      }
    }
    if (needSetupUI) {
      for(PsiMethod constructor: newClass.getConstructors()) {
        addSetupUICall(constructor, rootContainer, method);
      }
    }
    if (needInitializer) {
      newClass.addBefore(fakeClass.getInitializers()[0], method);
    }
    // (re)generate or remove the auxiliary helper methods depending on what this form needs
    @NonNls final String grcMethodText = "/** @noinspection ALL */ public javax.swing.JComponent " +
                                         AsmCodeGenerator.GET_ROOT_COMPONENT_METHOD_NAME +
                                         "() { return " + topComponent.getBinding() + "; }";
    generateMethodIfRequired(newClass, method, AsmCodeGenerator.GET_ROOT_COMPONENT_METHOD_NAME, grcMethodText, topComponent.getBinding() != null);
    final String loadButtonTextMethodText = getLoadMethodText(AsmCodeGenerator.LOAD_BUTTON_TEXT_METHOD, AbstractButton.class, module);
    generateMethodIfRequired(newClass, method, AsmCodeGenerator.LOAD_BUTTON_TEXT_METHOD, loadButtonTextMethodText, myNeedLoadButtonText);
    final String loadLabelTextMethodText = getLoadMethodText(AsmCodeGenerator.LOAD_LABEL_TEXT_METHOD, JLabel.class, module);
    generateMethodIfRequired(newClass, method, AsmCodeGenerator.LOAD_LABEL_TEXT_METHOD, loadLabelTextMethodText, myNeedLoadLabelText);
    newClass = (PsiClass) styler.shortenClassReferences(newClass);
    newClass = (PsiClass) formatter.reformat(newClass);
    // replace only when the classes differ beyond whitespace/javadoc
    if (!lexemsEqual(classToBind, newClass)) {
      classToBind.replace(newClass);
    }
  }
  /**
   * Inserts a $$$setupUI$$$() call into {@code constructor} unless it already
   * contains one (directly or transitively) or delegates to another constructor
   * of the same class via this(...). The call is anchored before the first
   * statement that references bound fields or foreign methods — so custom
   * components exist by the time such code runs — otherwise at the closing brace.
   */
  private static void addSetupUICall(final PsiMethod constructor, final LwRootContainer rootContainer, final PsiMethod setupUIMethod) {
    final PsiCodeBlock psiCodeBlock = constructor.getBody();
    if (psiCodeBlock == null) {
      return;
    }
    final PsiClass classToBind = constructor.getContainingClass();
    final PsiStatement[] statements = psiCodeBlock.getStatements();
    PsiElement anchor = psiCodeBlock.getRBrace();
    Ref<Boolean> callsThisConstructor = new Ref<Boolean>(Boolean.FALSE);
    for(PsiStatement statement: statements) {
      if (containsMethodIdentifier(statement, setupUIMethod)) {
        // constructor already calls the setup method explicitly - nothing to do
        return;
      }
      // skip super(...); anchor before the first component-dependent statement
      if (!ourSuperCallPattern.accepts(statement) &&
          hasCustomComponentAffectingReferences(statement, classToBind, rootContainer, callsThisConstructor)) {
        anchor = statement;
        break;
      }
    }
    // delegating constructors get the call via the constructor they delegate to
    if (!callsThisConstructor.get().booleanValue()) {
      final PsiElementFactory factory = JavaPsiFacade.getInstance(constructor.getProject()).getElementFactory();
      try {
        PsiStatement setupUIStatement = factory.createStatementFromText(AsmCodeGenerator.SETUP_METHOD_NAME + "();", constructor);
        psiCodeBlock.addBefore(setupUIStatement, anchor);
      }
      catch (IncorrectOperationException e) {
        LOG.error(e);
      }
    }
  }
private static boolean hasCustomComponentAffectingReferences(final PsiElement element,
final PsiClass classToBind,
final LwRootContainer rootContainer,
@Nullable final Ref<Boolean> callsThisConstructor) {
final Ref<Boolean> result = new Ref<Boolean>(Boolean.FALSE);
element.accept(new JavaRecursiveElementWalkingVisitor() {
@Override public void visitReferenceExpression(final PsiReferenceExpression expression) {
super.visitReferenceElement(expression);
final PsiElement psiElement = expression.resolve();
if (psiElement == null) {
return;
}
if (psiElement instanceof PsiField) {
PsiField field = (PsiField) psiElement;
if (field.getContainingClass().equals(classToBind)) {
if (Utils.isBoundField(rootContainer, field.getName())) {
result.set(Boolean.TRUE);
}
}
}
else if (psiElement instanceof PsiMethod) {
PsiMethod method = (PsiMethod) psiElement;
if (method.isConstructor()) {
if (method.getContainingClass() == classToBind) {
if (callsThisConstructor != null) {
callsThisConstructor.set(Boolean.TRUE);
}
}
else if (method.getContainingClass() != classToBind.getSuperClass()) {
result.set(Boolean.TRUE);
}
}
else {
result.set(Boolean.TRUE);
}
}
}
});
return result.get().booleanValue();
}
private static boolean lexemsEqual(final PsiClass classToBind, final PsiClass newClass) {
Lexer oldTextLexer = JavaParserDefinition.createLexer(LanguageLevel.HIGHEST);
Lexer newTextLexer = JavaParserDefinition.createLexer(LanguageLevel.HIGHEST);
String oldBuffer = classToBind.getText();
String newBuffer = newClass.getText();
oldTextLexer.start(oldBuffer);
newTextLexer.start(newBuffer);
while(true) {
IElementType oldLexem = oldTextLexer.getTokenType();
IElementType newLexem = newTextLexer.getTokenType();
if (oldLexem == null || newLexem == null) {
// must terminate at the same time
return oldLexem == null && newLexem == null;
}
if (oldLexem != newLexem) {
return false;
}
if (oldLexem != TokenType.WHITE_SPACE && oldLexem != JavaDocElementType.DOC_COMMENT) {
int oldStart = oldTextLexer.getTokenStart();
int newStart = newTextLexer.getTokenStart();
int oldLength = oldTextLexer.getTokenEnd() - oldTextLexer.getTokenStart();
int newLength = newTextLexer.getTokenEnd() - newTextLexer.getTokenStart();
if (oldLength != newLength) {
return false;
}
for(int i=0; i<oldLength; i++) {
if (oldBuffer.charAt(oldStart+i) != newBuffer.charAt(newStart+i)) {
return false;
}
}
}
oldTextLexer.advance();
newTextLexer.advance();
}
}
  /**
   * Builds the source text of the generated $$$loadButtonText$$$ /
   * $$$loadLabelText$$$ helper method: it strips '&amp;' mnemonic markers from the
   * given text ("&amp;&amp;" escapes a literal ampersand), sets the stripped text on the
   * component and applies the first mnemonic found. The mnemonic *index* is only
   * emitted when the component class actually has setDisplayedMnemonicIndex()
   * (see haveSetDisplayedMnemonic), keeping generated code JDK 1.3 compatible.
   */
  @NonNls
  private String getLoadMethodText(final String methodName, final Class componentClass, Module module) {
    final boolean needIndex = haveSetDisplayedMnemonic(componentClass, module);
    return
      "/** @noinspection ALL */ " +
      "private void " + methodName + "(" + componentClass.getName() + " component, java.lang.String text) {" +
      " StringBuffer result = new StringBuffer(); " +
      " boolean haveMnemonic = false; " +
      " char mnemonic = '\\0';" +
      (needIndex ? "int mnemonicIndex = -1;" : "") +
      " for(int i=0; i<text.length(); i++) {" +
      "   if (text.charAt(i) == '&') {" +
      "     i++;" +
      "     if (i == text.length()) break;" +
      "     if (!haveMnemonic && text.charAt(i) != '&') {" +
      "       haveMnemonic = true;" +
      "       mnemonic = text.charAt(i);" +
      (needIndex ? "mnemonicIndex = result.length();" : "") +
      "     }" +
      "   }" +
      "   result.append(text.charAt(i));" +
      " }" +
      " component.setText(result.toString()); " +
      " if (haveMnemonic) {" +
      (componentClass.equals(AbstractButton.class)
       ? "   component.setMnemonic(mnemonic);"
       : "   component.setDisplayedMnemonic(mnemonic);") +
      (needIndex ? "component.setDisplayedMnemonicIndex(mnemonicIndex);" : "") +
      "} }";
  }
private void generateMethodIfRequired(PsiClass aClass, PsiMethod anchor, final String methodName, String methodText, boolean condition) throws IncorrectOperationException {
PsiElementFactory elementFactory = JavaPsiFacade.getInstance(myProject).getElementFactory();
PsiMethod newMethod = null;
PsiMethod[] oldMethods = aClass.findMethodsByName(methodName, false);
if (!condition) {
for(PsiMethod oldMethod: oldMethods) {
oldMethod.delete();
}
}
else {
newMethod = elementFactory.createMethodFromText(methodText, aClass);
if (oldMethods.length > 0) {
newMethod = (PsiMethod) oldMethods [0].replace(newMethod);
}
else {
newMethod = (PsiMethod) aClass.addAfter(newMethod, anchor);
}
}
}
public static void cleanup(final PsiClass aClass) throws IncorrectOperationException{
final PsiMethod[] methods = aClass.findMethodsByName(AsmCodeGenerator.SETUP_METHOD_NAME, false);
for (final PsiMethod method: methods) {
final PsiClassInitializer[] initializers = aClass.getInitializers();
for (final PsiClassInitializer initializer : initializers) {
if (containsMethodIdentifier(initializer, method)) {
initializer.delete();
}
}
method.delete();
}
deleteMethods(aClass, AsmCodeGenerator.GET_ROOT_COMPONENT_METHOD_NAME);
deleteMethods(aClass, AsmCodeGenerator.LOAD_BUTTON_TEXT_METHOD);
deleteMethods(aClass, AsmCodeGenerator.LOAD_LABEL_TEXT_METHOD);
}
private static void deleteMethods(final PsiClass aClass, final String methodName) throws IncorrectOperationException {
final PsiMethod[] grcMethods = aClass.findMethodsByName(methodName, false);
for(final PsiMethod grcMethod: grcMethods) {
grcMethod.delete();
}
}
private static boolean containsMethodIdentifier(final PsiElement element, final PsiMethod setupMethod) {
if (element instanceof PsiMethodCallExpression) {
final PsiMethod psiMethod = ((PsiMethodCallExpression)element).resolveMethod();
if (setupMethod.equals(psiMethod)){
return true;
}
}
final PsiElement[] children = element.getChildren();
for (int i = children.length - 1; i >= 0; i--) {
if (containsMethodIdentifier(children[i], setupMethod)) {
return true;
}
}
return false;
}
  /**
   * Recursively appends to myBuffer the creation and configuration code for
   * {@code component} and all of its children: instantiation (unless custom
   * created), container layout setup, introspected property setters (sorted by
   * name), mnemonic handling, client properties and the add-to-parent call.
   *
   * @throws CodeGenerationException if a custom-create component has no binding,
   *                                 or a nested form class cannot be resolved
   */
  private void generateSetupCodeForComponent(final LwComponent component,
                                             final HashMap<LwComponent, String> component2TempVariable,
                                             final TObjectIntHashMap<String> class2variableIndex,
                                             final HashMap<String, LwComponent> id2component,
                                             final Module module,
                                             final PsiClass aClass) throws CodeGenerationException{
    id2component.put(component.getId(), component);
    GlobalSearchScope globalSearchScope = module.getModuleWithDependenciesAndLibrariesScope(false);
    final LwContainer parent = component.getParent();
    final String variable = getVariable(component, component2TempVariable, class2variableIndex, aClass);
    final String componentClass = component instanceof LwNestedForm
                                  ? getNestedFormClass(module, (LwNestedForm) component)
                                  : getComponentLayoutGenerator(component.getParent()).mapComponentClass(component.getComponentClassName());
    if (component.isCustomCreate() && component.getBinding() == null) {
      throw new CodeGenerationException(component.getId(), UIDesignerBundle.message("error.custom.create.no.binding"));
    }
    // emit "<binding|final Class var>=new Class();" unless the component is
    // instantiated by the user's createUIComponents() method
    if (!component.isCustomCreate()) {
      final String binding = component.getBinding();
      if (binding != null) {
        myBuffer.append(binding);
      }
      else {
        myBuffer.append("final ");
        myBuffer.append(componentClass);
        myBuffer.append(" ");
        myBuffer.append(variable);
      }
      myBuffer.append('=');
      startConstructor(componentClass);
      endConstructor(); // will finish the line
    }
    if (component instanceof LwContainer) {
      final LwContainer container = (LwContainer)component;
      if (!container.isCustomCreate() || container.getComponentCount() > 0) {
        getComponentLayoutGenerator(container).generateContainerLayout(container, this, variable);
      }
    }
    // introspected properties
    final LwIntrospectedProperty[] introspectedProperties = component.getAssignedIntrospectedProperties();
    // sort by property name for deterministic output; see SCR #35990
    Arrays.sort(introspectedProperties, new Comparator<LwIntrospectedProperty>() {
      public int compare(LwIntrospectedProperty p1, LwIntrospectedProperty p2) {
        return p1.getName().compareTo(p2.getName());
      }
    });
    for (final LwIntrospectedProperty property : introspectedProperties) {
      if (property instanceof LwIntroComponentProperty) {
        // component properties are processed in second pass
        continue;
      }
      Object value = component.getPropertyValue(property);
      //noinspection HardCodedStringLiteral
      // "text" of buttons/labels may embed a '&' mnemonic marker -> special handling
      final boolean isTextWithMnemonicProperty =
        "text".equals(property.getName()) &&
        (isAssignableFrom(AbstractButton.class.getName(), componentClass, globalSearchScope) ||
         isAssignableFrom(JLabel.class.getName(), componentClass, globalSearchScope));
      // handle resource bundles
      if (property instanceof LwRbIntroStringProperty) {
        final StringDescriptor descriptor = (StringDescriptor)value;
        if (descriptor.getValue() == null) {
          // value comes from a bundle: emit a runtime $$$load...Text$$$() call
          if (isTextWithMnemonicProperty) {
            if (isAssignableFrom(AbstractButton.class.getName(), componentClass, globalSearchScope)) {
              myNeedLoadButtonText = true;
              startMethodCall("this", AsmCodeGenerator.LOAD_BUTTON_TEXT_METHOD);
              pushVar(variable);
              push(descriptor);
              endMethod();
            }
            else {
              myNeedLoadLabelText = true;
              startMethodCall("this", AsmCodeGenerator.LOAD_LABEL_TEXT_METHOD);
              pushVar(variable);
              push(descriptor);
              endMethod();
            }
          }
          else {
            startMethodCall(variable, property.getWriteMethodName());
            push(descriptor);
            endMethod();
          }
          continue;
        }
        else {
          // hard-coded string: fall through to the regular setter handling below
          value = descriptor.getValue();
        }
      }
      else if (property instanceof LwIntroListModelProperty) {
        generateListModelProperty(property, class2variableIndex, aClass, value, variable);
        continue;
      }
      SupportCode.TextWithMnemonic textWithMnemonic = null;
      if (isTextWithMnemonicProperty) {
        textWithMnemonic = SupportCode.parseText((String)value);
        value = textWithMnemonic.myText;
      }
      final String propertyClass = property.getPropertyClassName();
      if (propertyClass.equals(Color.class.getName())) {
        ColorDescriptor descriptor = (ColorDescriptor) value;
        // "no color selected" -> keep the component's default, emit nothing
        if (!descriptor.isColorSet()) continue;
      }
      startMethodCall(variable, property.getWriteMethodName());
      // push the setter argument according to the declared property type
      if (propertyClass.equals(Dimension.class.getName())) {
        newDimension((Dimension)value);
      }
      else if (propertyClass.equals(Integer.class.getName())) {
        push(((Integer)value).intValue());
      }
      else if (propertyClass.equals(Double.class.getName())) {
        push(((Double)value).doubleValue());
      }
      else if (propertyClass.equals(Float.class.getName())) {
        push(((Float)value).floatValue());
      }
      else if (propertyClass.equals(Long.class.getName())) {
        push(((Long) value).longValue());
      }
      else if (propertyClass.equals(Short.class.getName())) {
        push(((Short) value).shortValue());
      }
      else if (propertyClass.equals(Byte.class.getName())) {
        push(((Byte) value).byteValue());
      }
      else if (propertyClass.equals(Character.class.getName())) {
        push(((Character) value).charValue());
      }
      else if (propertyClass.equals(Boolean.class.getName())) {
        push(((Boolean)value).booleanValue());
      }
      else if (propertyClass.equals(Rectangle.class.getName())) {
        newRectangle((Rectangle)value);
      }
      else if (propertyClass.equals(Insets.class.getName())) {
        newInsets((Insets)value);
      }
      else if (propertyClass.equals(String.class.getName())) {
        push((String)value);
      }
      else if (propertyClass.equals(Color.class.getName())) {
        pushColor((ColorDescriptor) value);
      }
      else if (propertyClass.equals(Font.class.getName())) {
        pushFont(variable, (FontDescriptor) value, property.getReadMethodName());
      }
      else if (propertyClass.equals(Icon.class.getName())) {
        pushIcon((IconDescriptor) value);
      }
      else if (property instanceof LwIntroEnumProperty) {
        // nested enum constants use '$' in the binary class name; source needs '.'
        pushVar(propertyClass.replace('$', '.') + "." + value.toString());
      }
      else {
        throw new RuntimeException("unexpected property class: " + propertyClass);
      }
      endMethod();
      // special handling of mnemonics
      if (!isTextWithMnemonicProperty) {
        continue;
      }
      if (textWithMnemonic.myMnemonicIndex == -1) {
        continue;
      }
      if (isAssignableFrom(AbstractButton.class.getName(), componentClass, globalSearchScope)) {
        generateSetMnemonic(variable, textWithMnemonic, module, "setMnemonic", AbstractButton.class);
      }
      else if (isAssignableFrom(JLabel.class.getName(), componentClass, globalSearchScope)) {
        generateSetMnemonic(variable, textWithMnemonic, module, "setDisplayedMnemonic", JLabel.class);
      }
    }
    generateClientProperties(component, variable);
    // add component to parent
    if (!(component.getParent() instanceof LwRootContainer)) {
      final String parentVariable = getVariable(parent, component2TempVariable, class2variableIndex, aClass);
      String componentVar = variable;
      if (component instanceof LwNestedForm) {
        // nested forms are added through their generated root-component accessor
        componentVar = variable + "." + AsmCodeGenerator.GET_ROOT_COMPONENT_METHOD_NAME + "()";
      }
      getComponentLayoutGenerator(component.getParent()).generateComponentLayout(component, this, componentVar, parentVariable);
    }
    if (component instanceof LwContainer) {
      final LwContainer container = (LwContainer)component;
      generateBorder(container, variable);
      // recurse into children after this container is fully configured
      for (int i = 0; i < container.getComponentCount(); i++) {
        generateSetupCodeForComponent((LwComponent)container.getComponent(i), component2TempVariable, class2variableIndex, id2component,
                                      module, aClass);
      }
    }
  }
  /**
   * Emits a setMnemonic/setDisplayedMnemonic call for the given component
   * variable, plus a setDisplayedMnemonicIndex call when the control class
   * actually provides that method.
   */
  private void generateSetMnemonic(final String variable, final SupportCode.TextWithMnemonic textWithMnemonic, final Module module,
                                   @NonNls final String setMethodName, final Class controlClass) {
    startMethodCall(variable, setMethodName);
    pushVar("'" + textWithMnemonic.getMnemonicChar() + "'");
    endMethod();
    if (haveSetDisplayedMnemonic(controlClass, module)) {
      // emit the index call only when the method exists in the module's classpath,
      // because generated code needs to be compatible with jdk 1.3
      startMethodCall(variable, "setDisplayedMnemonicIndex");
      push(textWithMnemonic.myMnemonicIndex);
      endMethod();
    }
  }
/**
 * Returns true when the given control class, resolved in the module's
 * libraries scope, declares or inherits setDisplayedMnemonicIndex().
 */
private boolean haveSetDisplayedMnemonic(final Class controlClass, final Module module) {
    final PsiClass psiClass =
            JavaPsiFacade.getInstance(myProject).findClass(controlClass.getName(), module.getModuleWithLibrariesScope());
    if (psiClass == null) {
        return false;
    }
    return psiClass.findMethodsByName("setDisplayedMnemonicIndex", true).length > 0;
}
/**
 * Emits code that builds a list/combo-box model, fills it with the given
 * String items, and assigns it to the component via the property's setter.
 */
private void generateListModelProperty(final LwIntrospectedProperty property, final TObjectIntHashMap<String> class2variableIndex,
                                       final PsiClass aClass, final Object value, final String variable) {
    // Combo boxes take a DefaultComboBoxModel; all other list components a DefaultListModel.
    final String modelClass =
            property.getPropertyClassName().equals(ComboBoxModel.class.getName())
                    ? DefaultComboBoxModel.class.getName()
                    : DefaultListModel.class.getName();
    final String modelVar = generateUniqueVariableName(modelClass, class2variableIndex, aClass);
    // final <modelClass> <modelVar>= new <modelClass>();
    myBuffer.append("final ")
            .append(modelClass)
            .append(" ")
            .append(modelVar)
            .append("= new ").append(modelClass).append("();");
    for (final String item : (String[]) value) {
        startMethodCall(modelVar, "addElement");
        push(item);
        endMethod();
    }
    // <variable>.<writeMethod>(<modelVar>);
    startMethodCall(variable, property.getWriteMethodName());
    pushVar(modelVar);
    endMethod();
}
/**
 * Emits a {@code setBorder(BorderFactory.createTitledBorder(...))} call for the
 * container when it has a border type other than NONE and/or a border title.
 * The exact createTitledBorder overload emitted depends on whether any
 * "custom" title attribute is set (see {@link #isCustomBorder}).
 */
private void generateBorder(final LwContainer container, final String variable) {
    final BorderType borderType = container.getBorderType();
    final StringDescriptor borderTitle = container.getBorderTitle();
    final Insets borderSize = container.getBorderSize();
    final String borderFactoryMethodName = borderType.getBorderFactoryMethodName();
    final boolean borderNone = borderType.equals(BorderType.NONE);
    if (!borderNone || borderTitle != null) {
        // <variable>.setBorder(BorderFactory.createTitledBorder(<inner border>, <title>, ...));
        startMethodCall(variable, "setBorder");
        startStaticMethodCall(BorderFactory.class, "createTitledBorder");
        if (!borderNone) {
            // inner-border argument: BorderFactory.<factory method>(...)
            startStaticMethodCall(BorderFactory.class, borderFactoryMethodName);
            if (borderType.equals(BorderType.LINE)) {
                // line borders take a color; black is the default
                if (container.getBorderColor() == null) {
                    pushVar("java.awt.Color.black");
                }
                else {
                    pushColor(container.getBorderColor());
                }
            }
            else if (borderType.equals(BorderType.EMPTY) && borderSize != null) {
                // empty borders take explicit insets
                push(borderSize.top);
                push(borderSize.left);
                push(borderSize.bottom);
                push(borderSize.right);
            }
            endMethod();
        }
        else if (isCustomBorder(container)) {
            // extended overload with no inner border: pass an explicit null
            push((String) null);
        }
        push(borderTitle);
        if (isCustomBorder(container)) {
            // extended overload arguments: justification, position, then optional font/color
            push(container.getBorderTitleJustification(), ourTitleJustificationMap);
            push(container.getBorderTitlePosition(), ourTitlePositionMap);
            if (container.getBorderTitleFont() != null || container.getBorderTitleColor() != null) {
                if (container.getBorderTitleFont() == null) {
                    push((String) null);
                }
                else {
                    pushFont(variable, container.getBorderTitleFont(), "getFont");
                }
                if (container.getBorderTitleColor() != null) {
                    pushColor(container.getBorderTitleColor());
                }
            }
        }
        endMethod(); // createTitledBorder
        endMethod(); // setBorder
    }
}
/**
 * A border is "custom" when any titled-border attribute beyond the title text
 * (justification, position, title color or title font) is explicitly set.
 */
private static boolean isCustomBorder(final LwContainer container) {
    if (container.getBorderTitleJustification() != 0) {
        return true;
    }
    if (container.getBorderTitlePosition() != 0) {
        return true;
    }
    return container.getBorderTitleColor() != null || container.getBorderTitleFont() != null;
}
/**
 * Emits one {@code putClientProperty(key, value)} call per delegee client
 * property. Supported value types: StringDescriptor, Boolean, Integer, Double.
 *
 * @throws CodeGenerationException on an unsupported value type
 */
private void generateClientProperties(final LwComponent component, final String variable) throws CodeGenerationException {
    final HashMap props = component.getDelegeeClientProperties();
    for (final Object rawEntry : props.entrySet()) {
        final Map.Entry entry = (Map.Entry) rawEntry;
        startMethodCall(variable, "putClientProperty");
        push((String) entry.getKey());
        final Object value = entry.getValue();
        if (value instanceof StringDescriptor) {
            push(((StringDescriptor) value).getValue());
        }
        else if (value instanceof Boolean) {
            // emit the canonical boxed constants rather than boolean literals
            pushVar(((Boolean) value).booleanValue() ? "Boolean.TRUE" : "Boolean.FALSE");
        }
        else {
            // numeric values become "new <BoxedType>(<literal>)"
            startConstructor(value.getClass().getName());
            if (value instanceof Integer) {
                push(((Integer) value).intValue());
            }
            else if (value instanceof Double) {
                push(((Double) value).doubleValue());
            }
            else {
                throw new CodeGenerationException(component.getId(), "Unknown client property value type");
            }
            endConstructor();
        }
        endMethod();
    }
}
/**
 * Loads the nested form referenced by {@code nestedForm} and returns the
 * fully-qualified name of the class the form is bound to.
 *
 * @throws CodeGenerationException when the form cannot be loaded
 */
private static String getNestedFormClass(Module module, final LwNestedForm nestedForm) throws CodeGenerationException {
    try {
        final LwRootContainer rootContainer = new PsiNestedFormLoader(module).loadForm(nestedForm.getFormFileName());
        return rootContainer.getClassToBind();
    }
    catch (Exception e) {
        // surface any loader failure as a code-generation error at this component
        throw new CodeGenerationException(nestedForm.getId(), e.getMessage());
    }
}
/**
 * Emits setter calls for component-reference properties (properties whose
 * value is another component's id), then recurses into child components.
 */
private void generateComponentReferenceProperties(final LwComponent component,
                                                  final HashMap<LwComponent, String> component2variable,
                                                  final TObjectIntHashMap<String> class2variableIndex,
                                                  final HashMap<String, LwComponent> id2component,
                                                  final PsiClass aClass) {
    final String componentVar = getVariable(component, component2variable, class2variableIndex, aClass);
    for (final LwIntrospectedProperty property : component.getAssignedIntrospectedProperties()) {
        if (!(property instanceof LwIntroComponentProperty)) {
            continue;
        }
        final String referencedId = (String) component.getPropertyValue(property);
        if (referencedId == null || referencedId.length() == 0) {
            continue;
        }
        final LwComponent referenced = id2component.get(referencedId);
        if (referenced == null) {
            continue; // dangling reference: silently skipped, as before
        }
        final String referencedVar = getVariable(referenced, component2variable, class2variableIndex, aClass);
        // <componentVar>.<writeMethod>(<referencedVar>);
        startMethodCall(componentVar, property.getWriteMethodName());
        pushVar(referencedVar);
        endMethod();
    }
    if (component instanceof LwContainer) {
        final LwContainer container = (LwContainer) component;
        for (int i = 0; i < container.getComponentCount(); i++) {
            generateComponentReferenceProperties((LwComponent) container.getComponent(i), component2variable,
                                                 class2variableIndex, id2component, aClass);
        }
    }
}
/**
 * Emits ButtonGroup construction and {@code add(...)} calls for every button
 * group in the root container. Unbound groups share one local variable
 * ("buttonGroup"), declared lazily the first time it is needed; each group's
 * constructor is likewise emitted lazily, on its first resolvable member.
 */
private void generateButtonGroups(final LwRootContainer rootContainer,
                                  final HashMap<LwComponent, String> component2variable,
                                  final TObjectIntHashMap<String> class2variableIndex,
                                  final HashMap<String, LwComponent> id2component,
                                  final PsiClass aClass) {
    boolean sharedVariableDeclared = false;
    for (final IButtonGroup group : rootContainer.getButtonGroups()) {
        boolean groupInstantiated = false;
        for (final String componentId : group.getComponentIds()) {
            final LwComponent target = id2component.get(componentId);
            if (target == null) {
                continue; // member id does not resolve to a component
            }
            if (!groupInstantiated) {
                if (group.isBound()) {
                    // bound group: assign to its field
                    append(group.getName());
                }
                else {
                    if (!sharedVariableDeclared) {
                        append("javax.swing.ButtonGroup buttonGroup;");
                        sharedVariableDeclared = true;
                    }
                    append("buttonGroup");
                }
                append("= new javax.swing.ButtonGroup();");
                groupInstantiated = true;
            }
            final String targetVariable = getVariable(target, component2variable, class2variableIndex, aClass);
            startMethodCall(group.isBound() ? group.getName() : "buttonGroup", "add");
            pushVar(targetVariable);
            endMethod();
        }
    }
}
/**
 * Picks the layout source generator for a container: first by the container's
 * concrete class, then by the nearest ancestor's explicit layout manager,
 * finally falling back to the grid layout generator.
 */
private static LayoutSourceGenerator getComponentLayoutGenerator(final LwContainer container) {
    final LayoutSourceGenerator byClass = ourComponentLayoutCodeGenerators.get(container.getClass());
    if (byClass != null) {
        return byClass;
    }
    // Walk up the parent chain looking for a named layout manager we know.
    for (LwContainer ancestor = container; ancestor != null; ancestor = ancestor.getParent()) {
        final String layoutManager = ancestor.getLayoutManager();
        if (layoutManager == null || layoutManager.length() == 0) {
            continue;
        }
        final LayoutSourceGenerator byLayout = ourContainerLayoutCodeGenerators.get(layoutManager);
        if (byLayout != null) {
            return byLayout;
        }
    }
    return GridLayoutSourceGenerator.INSTANCE;
}
/**
 * Pushes a string argument: null descriptors become a null literal, plain
 * values a string literal, and bundle-backed values a ResourceBundle lookup.
 */
void push(final StringDescriptor descriptor) {
    if (descriptor == null) {
        push((String)null);
        return;
    }
    final String plainValue = descriptor.getValue();
    if (plainValue != null) {
        push(plainValue);
        return;
    }
    // java.util.ResourceBundle.getBundle("<bundle>").getString("<key>")
    startMethodCall("java.util.ResourceBundle.getBundle(\"" + descriptor.getBundleName() + "\")", "getString");
    push(descriptor.getKey());
    endMethod();
}
/**
 * Pushes a color argument in whichever form the descriptor carries:
 * explicit RGB, a UIManager key, a SystemColor constant, or an AWT constant.
 *
 * @throws IllegalStateException when the descriptor carries none of these
 */
private void pushColor(final ColorDescriptor descriptor) {
    if (descriptor.getColor() != null) {
        // new java.awt.Color(<rgb>)
        startConstructor(Color.class.getName());
        push(descriptor.getColor().getRGB());
        endConstructor();
        return;
    }
    if (descriptor.getSwingColor() != null) {
        // UIManager.getColor("<key>")
        startStaticMethodCall(UIManager.class, "getColor");
        push(descriptor.getSwingColor());
        endMethod();
        return;
    }
    if (descriptor.getSystemColor() != null) {
        pushVar("java.awt.SystemColor." + descriptor.getSystemColor());
        return;
    }
    if (descriptor.getAWTColor() != null) {
        pushVar("java.awt.Color." + descriptor.getAWTColor());
        return;
    }
    throw new IllegalStateException("Unknown color type");
}
/**
 * Pushes a font argument: either a UIManager font lookup, or a
 * {@code new Font(name, style, size)} where each unspecified part falls back
 * to the component's current font via {@code <variable>.<getterName>()}.
 */
private void pushFont(final String variable, final FontDescriptor fontDescriptor, @NonNls final String getterName) {
    if (fontDescriptor.getSwingFont() != null) {
        // UIManager.getFont("<key>")
        startStaticMethodCall(UIManager.class, "getFont");
        push(fontDescriptor.getSwingFont());
        endMethod();
        return;
    }
    startConstructor(Font.class.getName());
    if (fontDescriptor.getFontName() == null) {
        pushVar(variable + "." + getterName + "().getName()");
    }
    else {
        push(fontDescriptor.getFontName());
    }
    if (fontDescriptor.getFontStyle() < 0) {
        pushVar(variable + "." + getterName + "().getStyle()");
    }
    else {
        push(fontDescriptor.getFontStyle(), ourFontStyleMap);
    }
    if (fontDescriptor.getFontSize() < 0) {
        pushVar(variable + "." + getterName + "().getSize()");
    }
    else {
        push(fontDescriptor.getFontSize());
    }
    endMethod();
}
/**
 * Pushes an icon argument as
 * {@code new javax.swing.ImageIcon(getClass().getResource("/<path>"))}.
 */
public void pushIcon(final IconDescriptor iconDescriptor) {
    startConstructor(ImageIcon.class.getName());
    startMethodCall("getClass()", "getResource");
    push("/" + iconDescriptor.getIconPath());
    endMethod(); // closes getResource(...)
    endMethod(); // closes the ImageIcon constructor argument list
}
/**
 * Returns true when class {@code fromName} is {@code className} or inherits
 * from it, resolving both by name in the given search scope. Unresolvable
 * classes yield false.
 */
private boolean isAssignableFrom(final String className, final String fromName, final GlobalSearchScope scope) {
    final JavaPsiFacade facade = JavaPsiFacade.getInstance(myProject);
    final PsiClass baseClass = facade.findClass(className, scope);
    final PsiClass candidateClass = facade.findClass(fromName, scope);
    return baseClass != null
            && candidateClass != null
            && InheritanceUtil.isInheritorOrSelf(candidateClass, baseClass, true);
}
/**
 * Returns the source-level variable name used for the given component:
 * a previously assigned temporary, the component's field binding, or a
 * freshly generated unique name (which is then cached in the map).
 */
private static String getVariable(final LwComponent component,
                                  final HashMap<LwComponent, String> component2variable,
                                  final TObjectIntHashMap<String> class2variableIndex,
                                  final PsiClass aClass) {
    if (component2variable.containsKey(component)) {
        return component2variable.get(component);
    }
    final String binding = component.getBinding();
    if (binding != null) {
        return binding; // bound components use their field, never a temporary
    }
    @NonNls final String className =
            component instanceof LwNestedForm ? "nestedForm" : component.getComponentClassName();
    final String generated = generateUniqueVariableName(className, class2variableIndex, aClass);
    component2variable.put(component, generated);
    return generated;
}
/**
 * Derives a camel-cased variable name from a class name ("javax.swing.JButton"
 * -> "button1") and appends an increasing per-class suffix until the name does
 * not clash with any field of {@code aClass}.
 */
private static String generateUniqueVariableName(@NonNls final String className, final TObjectIntHashMap<String> class2variableIndex,
                                                 final PsiClass aClass) {
    // Short base name: strip the "javax.swing.J" prefix, else the package part.
    @NonNls final String swingPrefix = "javax.swing.J";
    final String shortName;
    if (className.startsWith(swingPrefix)) {
        shortName = className.substring(swingPrefix.length());
    }
    else {
        final int lastDot = className.lastIndexOf('.');
        shortName = lastDot >= 0 ? className.substring(lastDot + 1) : className;
    }
    if (!class2variableIndex.containsKey(className)) {
        class2variableIndex.put(className, 0);
    }
    while (true) {
        class2variableIndex.increment(className);
        final int index = class2variableIndex.get(className);
        final String candidate = Character.toLowerCase(shortName.charAt(0)) + shortName.substring(1) + index;
        // skip names already taken by fields (including inherited ones)
        if (aClass.findFieldByName(candidate, true) == null) {
            return candidate;
        }
    }
}
/**
 * Pushes a Dimension argument, emitting a null literal for the sentinel
 * value (-1, -1) which encodes "no explicit size".
 */
void newDimensionOrNull(final Dimension dimension) {
    final boolean unspecified = dimension.width == -1 && dimension.height == -1;
    if (unspecified) {
        checkParameter();
        myBuffer.append("null");
    }
    else {
        newDimension(dimension);
    }
}
/** Pushes a {@code new java.awt.Dimension(width, height)} argument. */
void newDimension(final Dimension dimension) {
    startConstructor(Dimension.class.getName());
    push(dimension.width);
    push(dimension.height);
    endConstructor();
}
/** Pushes a {@code new java.awt.Insets(top, left, bottom, right)} argument. */
void newInsets(final Insets insets){
    startConstructor(Insets.class.getName());
    push(insets.top);
    push(insets.left);
    push(insets.bottom);
    push(insets.right);
    endConstructor();
}
/** Pushes a {@code new java.awt.Rectangle(x, y, width, height)} argument. */
private void newRectangle(final Rectangle rectangle) {
    startConstructor(Rectangle.class.getName());
    push(rectangle.x);
    push(rectangle.y);
    push(rectangle.width);
    push(rectangle.height);
    endConstructor();
}
/**
 * Opens an instance method call: emits "<variable>.<methodName>(" and pushes
 * a first-parameter marker so subsequent push() calls comma-separate.
 */
void startMethodCall(@NonNls final String variable, @NonNls final String methodName) {
    checkParameter();
    append(variable);
    myBuffer.append('.').append(methodName).append('(');
    myIsFirstParameterStack.push(Boolean.TRUE);
}
/**
 * Opens a static method call: emits "<fqcn>.<methodName>(" and pushes a
 * first-parameter marker so subsequent push() calls comma-separate.
 */
private void startStaticMethodCall(final Class aClass, @NonNls final String methodName) {
    checkParameter();
    myBuffer.append(aClass.getName()).append('.').append(methodName).append('(');
    myIsFirstParameterStack.push(Boolean.TRUE);
}
/**
 * Closes the innermost call with ')'; when that call was the outermost one,
 * terminates the statement with ";\n".
 */
void endMethod() {
    myBuffer.append(')');
    myIsFirstParameterStack.pop();
    final boolean outermostCallClosed = myIsFirstParameterStack.empty();
    if (outermostCallClosed) {
        myBuffer.append(";\n");
    }
}
/**
 * Opens a constructor call: emits "new <className>(" and pushes a
 * first-parameter marker so subsequent push() calls comma-separate.
 */
void startConstructor(final String className) {
    checkParameter();
    myBuffer.append("new ").append(className).append('(');
    myIsFirstParameterStack.push(Boolean.TRUE);
}
/** Closes a constructor call; identical mechanics to closing a method call. */
void endConstructor() {
    endMethod();
}
/** Pushes a byte argument, comma-separating when needed. */
void push(final byte value) {
    checkParameter();
    append(value);
}
/** Appends a byte literal with an explicit "(byte) " cast prefix. */
void append(byte value) {
    myBuffer.append("(byte) ").append(value);
}
/** Pushes a short argument, comma-separating when needed. */
void push(final short value) {
    checkParameter();
    append(value);
}
/** Appends a short literal with an explicit "(short) " cast prefix. */
void append(short value) {
    myBuffer.append("(short) ").append(value);
}
/** Pushes an int argument, comma-separating when needed. */
void push(final int value) {
    checkParameter();
    append(value);
}
/** Appends an int literal verbatim. */
void append(final int value) {
    myBuffer.append(value);
}
/**
 * Pushes an int argument, preferring the symbolic constant name from the map
 * (e.g. a SwingConstants name) over the raw numeric literal when one exists.
 */
void push(final int value, final TIntObjectHashMap map){
    final String symbolicName = (String) map.get(value);
    if (symbolicName == null) {
        push(value);
    }
    else {
        checkParameter();
        myBuffer.append(symbolicName);
    }
}
/** Pushes a double argument, comma-separating when needed. */
private void push(final double value) {
    checkParameter();
    append(value);
}
/** Appends a double literal verbatim. */
public void append(final double value) {
    myBuffer.append(value);
}
/** Pushes a float argument, comma-separating when needed. */
private void push(final float value) {
    checkParameter();
    append(value);
}
/** Appends a float literal with the "f" suffix. */
public void append(final float value) {
    myBuffer.append(value).append("f");
}
/** Pushes a long argument, comma-separating when needed. */
private void push(final long value) {
    checkParameter();
    append(value);
}
/** Appends a long literal with the "L" suffix. */
public void append(final long value) {
    myBuffer.append(value).append("L");
}
/** Pushes a char argument, comma-separating when needed. */
private void push(final char value) {
    checkParameter();
    append(value);
}
/** Appends a char literal wrapped in single quotes (value is not escaped). */
public void append(final char value) {
    myBuffer.append("'").append(value).append("'");
}
/** Pushes a boolean argument as a true/false literal. */
void push(final boolean value) {
    checkParameter();
    myBuffer.append(value);
}
/**
 * Pushes a String argument as a quoted, escaped literal; null becomes the
 * null literal.
 */
void push(final String value) {
    checkParameter();
    if (value == null) {
        myBuffer.append("null");
        return;
    }
    myBuffer.append('"')
            .append(StringUtil.escapeStringCharacters(value))
            .append('"');
}
/** Pushes a raw expression/variable reference as an argument, unquoted. */
void pushVar(@NonNls final String variable) {
    checkParameter();
    append(variable);
}
/** Appends raw text to the output buffer with no separator handling. */
void append(@NonNls final String text) {
    myBuffer.append(text);
}
/**
 * Emits a separating comma before every parameter of the innermost open call
 * except the first, then marks that call as "has at least one parameter".
 * No-op when no call is open.
 */
void checkParameter() {
    if (myIsFirstParameterStack.empty()) {
        return;
    }
    final Boolean isFirstParameter = myIsFirstParameterStack.pop();
    if (Boolean.FALSE.equals(isFirstParameter)) {
        myBuffer.append(',');
    }
    myIsFirstParameterStack.push(Boolean.FALSE);
}
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.2-hudson-jaxb-ri-2.2-63-
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2013.10.29 at 05:09:52 \uc624\ud6c4 KST
//
package net.ion.open.oadr2.model.v20b;
import java.io.Serializable;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import net.ion.open.oadr2.model.v20b.ei.EiResponse;
import org.jvnet.jaxb2_commons.lang.Equals;
import org.jvnet.jaxb2_commons.lang.EqualsStrategy;
import org.jvnet.jaxb2_commons.lang.HashCode;
import org.jvnet.jaxb2_commons.lang.HashCodeStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBEqualsStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBHashCodeStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBToStringStrategy;
import org.jvnet.jaxb2_commons.lang.ToString;
import org.jvnet.jaxb2_commons.lang.ToStringStrategy;
import org.jvnet.jaxb2_commons.locator.ObjectLocator;
import org.jvnet.jaxb2_commons.locator.util.LocatorUtils;
/**
* <p>Java class for oadrCreatedReportType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="oadrCreatedReportType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element ref="{http://docs.oasis-open.org/ns/energyinterop/201110}eiResponse"/>
* <element ref="{http://openadr.org/oadr-2.0b/2012/07}oadrPendingReports"/>
* <element ref="{http://docs.oasis-open.org/ns/energyinterop/201110}venID" minOccurs="0"/>
* </sequence>
* <attribute ref="{http://docs.oasis-open.org/ns/energyinterop/201110}schemaVersion"/>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "oadrCreatedReportType", propOrder = {
"eiResponse",
"oadrPendingReports",
"venID"
})
@XmlRootElement(name = "oadrCreatedReport")
public class OadrCreatedReport implements Serializable, Equals, HashCode, ToString
{
private final static long serialVersionUID = 1L;
@XmlElement(namespace = "http://docs.oasis-open.org/ns/energyinterop/201110", required = true)
protected EiResponse eiResponse;
@XmlElement(required = true)
protected OadrPendingReports oadrPendingReports;
@XmlElement(namespace = "http://docs.oasis-open.org/ns/energyinterop/201110")
protected String venID;
@XmlAttribute(name = "schemaVersion", namespace = "http://docs.oasis-open.org/ns/energyinterop/201110")
protected String schemaVersion;
/**
* Default no-arg constructor
*
*/
public OadrCreatedReport() {
super();
}
/**
* Fully-initialising value constructor
*
*/
public OadrCreatedReport(final EiResponse eiResponse, final OadrPendingReports oadrPendingReports, final String venID, final String schemaVersion) {
this.eiResponse = eiResponse;
this.oadrPendingReports = oadrPendingReports;
this.venID = venID;
this.schemaVersion = schemaVersion;
}
/**
* Gets the value of the eiResponse property.
*
* @return
* possible object is
* {@link EiResponse }
*
*/
public EiResponse getEiResponse() {
return eiResponse;
}
/**
* Sets the value of the eiResponse property.
*
* @param value
* allowed object is
* {@link EiResponse }
*
*/
public void setEiResponse(EiResponse value) {
this.eiResponse = value;
}
/**
* List of periodic reports that have not yet been delivered
*
* @return
* possible object is
* {@link OadrPendingReports }
*
*/
public OadrPendingReports getOadrPendingReports() {
return oadrPendingReports;
}
/**
* Sets the value of the oadrPendingReports property.
*
* @param value
* allowed object is
* {@link OadrPendingReports }
*
*/
public void setOadrPendingReports(OadrPendingReports value) {
this.oadrPendingReports = value;
}
/**
* Gets the value of the venID property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getVenID() {
return venID;
}
/**
* Sets the value of the venID property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setVenID(String value) {
this.venID = value;
}
/**
* Gets the value of the schemaVersion property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getSchemaVersion() {
return schemaVersion;
}
/**
* Sets the value of the schemaVersion property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setSchemaVersion(String value) {
this.schemaVersion = value;
}
public String toString() {
final ToStringStrategy strategy = JAXBToStringStrategy.INSTANCE;
final StringBuilder buffer = new StringBuilder();
append(null, buffer, strategy);
return buffer.toString();
}
public StringBuilder append(ObjectLocator locator, StringBuilder buffer, ToStringStrategy strategy) {
strategy.appendStart(locator, this, buffer);
appendFields(locator, buffer, strategy);
strategy.appendEnd(locator, this, buffer);
return buffer;
}
public StringBuilder appendFields(ObjectLocator locator, StringBuilder buffer, ToStringStrategy strategy) {
{
EiResponse theEiResponse;
theEiResponse = this.getEiResponse();
strategy.appendField(locator, this, "eiResponse", buffer, theEiResponse);
}
{
OadrPendingReports theOadrPendingReports;
theOadrPendingReports = this.getOadrPendingReports();
strategy.appendField(locator, this, "oadrPendingReports", buffer, theOadrPendingReports);
}
{
String theVenID;
theVenID = this.getVenID();
strategy.appendField(locator, this, "venID", buffer, theVenID);
}
{
String theSchemaVersion;
theSchemaVersion = this.getSchemaVersion();
strategy.appendField(locator, this, "schemaVersion", buffer, theSchemaVersion);
}
return buffer;
}
public boolean equals(ObjectLocator thisLocator, ObjectLocator thatLocator, Object object, EqualsStrategy strategy) {
if (!(object instanceof OadrCreatedReport)) {
return false;
}
if (this == object) {
return true;
}
final OadrCreatedReport that = ((OadrCreatedReport) object);
{
EiResponse lhsEiResponse;
lhsEiResponse = this.getEiResponse();
EiResponse rhsEiResponse;
rhsEiResponse = that.getEiResponse();
if (!strategy.equals(LocatorUtils.property(thisLocator, "eiResponse", lhsEiResponse), LocatorUtils.property(thatLocator, "eiResponse", rhsEiResponse), lhsEiResponse, rhsEiResponse)) {
return false;
}
}
{
OadrPendingReports lhsOadrPendingReports;
lhsOadrPendingReports = this.getOadrPendingReports();
OadrPendingReports rhsOadrPendingReports;
rhsOadrPendingReports = that.getOadrPendingReports();
if (!strategy.equals(LocatorUtils.property(thisLocator, "oadrPendingReports", lhsOadrPendingReports), LocatorUtils.property(thatLocator, "oadrPendingReports", rhsOadrPendingReports), lhsOadrPendingReports, rhsOadrPendingReports)) {
return false;
}
}
{
String lhsVenID;
lhsVenID = this.getVenID();
String rhsVenID;
rhsVenID = that.getVenID();
if (!strategy.equals(LocatorUtils.property(thisLocator, "venID", lhsVenID), LocatorUtils.property(thatLocator, "venID", rhsVenID), lhsVenID, rhsVenID)) {
return false;
}
}
{
String lhsSchemaVersion;
lhsSchemaVersion = this.getSchemaVersion();
String rhsSchemaVersion;
rhsSchemaVersion = that.getSchemaVersion();
if (!strategy.equals(LocatorUtils.property(thisLocator, "schemaVersion", lhsSchemaVersion), LocatorUtils.property(thatLocator, "schemaVersion", rhsSchemaVersion), lhsSchemaVersion, rhsSchemaVersion)) {
return false;
}
}
return true;
}
public boolean equals(Object object) {
final EqualsStrategy strategy = JAXBEqualsStrategy.INSTANCE;
return equals(null, null, object, strategy);
}
public int hashCode(ObjectLocator locator, HashCodeStrategy strategy) {
int currentHashCode = 1;
{
EiResponse theEiResponse;
theEiResponse = this.getEiResponse();
currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "eiResponse", theEiResponse), currentHashCode, theEiResponse);
}
{
OadrPendingReports theOadrPendingReports;
theOadrPendingReports = this.getOadrPendingReports();
currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "oadrPendingReports", theOadrPendingReports), currentHashCode, theOadrPendingReports);
}
{
String theVenID;
theVenID = this.getVenID();
currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "venID", theVenID), currentHashCode, theVenID);
}
{
String theSchemaVersion;
theSchemaVersion = this.getSchemaVersion();
currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "schemaVersion", theSchemaVersion), currentHashCode, theSchemaVersion);
}
return currentHashCode;
}
public int hashCode() {
final HashCodeStrategy strategy = JAXBHashCodeStrategy.INSTANCE;
return this.hashCode(null, strategy);
}
public OadrCreatedReport withEiResponse(EiResponse value) {
setEiResponse(value);
return this;
}
public OadrCreatedReport withOadrPendingReports(OadrPendingReports value) {
setOadrPendingReports(value);
return this;
}
public OadrCreatedReport withVenID(String value) {
setVenID(value);
return this;
}
public OadrCreatedReport withSchemaVersion(String value) {
setSchemaVersion(value);
return this;
}
}
| |
/*
* Copyright (c) 2000, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
*
* (C) Copyright IBM Corp. 1999 All Rights Reserved.
* Copyright 1997 The Open Group Research Institute. All rights reserved.
*/
package sun.security.krb5.internal.ktab;
import sun.security.krb5.*;
import sun.security.krb5.internal.*;
import sun.security.krb5.internal.crypto.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.io.IOException;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.Vector;
/**
* This class represents key table. The key table functions deal with storing
* and retrieving service keys for use in authentication exchanges.
*
* A KeyTab object is always constructed, if the file specified does not
* exist, it's still valid but empty. If there is an I/O error or file format
* error, it's invalid.
*
* The class is immutable on the read side (the write side is only used by
* the ktab tool).
*
* @author Yanni Zhang
*/
public class KeyTab implements KeyTabConstants {
    private static final boolean DEBUG = Krb5.DEBUG;
    // Lazily resolved default keytab path; computed once by getDefaultTabName().
    private static String defaultTabName = null;

    // Attention: Currently there is no way to remove a keytab from this map,
    // this might lead to a memory leak.
    private static Map<String,KeyTab> map = new HashMap<>();

    // KeyTab file does not exist. Note: a missing keytab is still valid
    private boolean isMissing = false;

    // KeyTab file is invalid, possibly an I/O error or a file format error.
    private boolean isValid = true;

    // Path of the keytab file this object was read from.
    private final String tabName;
    // File modification time captured at load; used for cache freshness checks.
    private long lastModified;
    // Keytab file format version as read from the file header.
    private int kt_vno;

    // Entries parsed from the file; empty for a missing or invalid keytab.
    private Vector<KeyTabEntry> entries = new Vector<>();
/**
* Constructs a KeyTab object.
*
* If there is any I/O error or format errot during the loading, the
* isValid flag is set to false, and all half-read entries are dismissed.
* @param filename path name for the keytab file, must not be null
*/
private KeyTab(String filename) {
tabName = filename;
try {
lastModified = new File(tabName).lastModified();
try (KeyTabInputStream kis =
new KeyTabInputStream(new FileInputStream(filename))) {
load(kis);
}
} catch (FileNotFoundException e) {
entries.clear();
isMissing = true;
} catch (Exception ioe) {
entries.clear();
isValid = false;
}
}
/**
* Read a keytab file. Returns a new object and save it into cache when
* new content (modified since last read) is available. If keytab file is
* invalid, the old object will be returned. This is a safeguard for
* partial-written keytab files or non-stable network. Please note that
* a missing keytab is valid, which is equivalent to an empty keytab.
*
* @param s file name of keytab, must not be null
* @return the keytab object, can be invalid, but never null.
*/
private synchronized static KeyTab getInstance0(String s) {
long lm = new File(s).lastModified();
KeyTab old = map.get(s);
if (old != null && old.isValid() && old.lastModified == lm) {
return old;
}
KeyTab ktab = new KeyTab(s);
if (ktab.isValid()) { // A valid new keytab
map.put(s, ktab);
return ktab;
} else if (old != null) { // An existing old one
return old;
} else {
return ktab; // first read is invalid
}
}
/**
* Gets a KeyTab object.
* @param s the key tab file name.
* @return the KeyTab object, never null.
*/
public static KeyTab getInstance(String s) {
if (s == null) {
return getInstance();
} else {
return getInstance0(s);
}
}
/**
* Gets a KeyTab object.
* @param file the key tab file.
* @return the KeyTab object, never null.
*/
public static KeyTab getInstance(File file) {
if (file == null) {
return getInstance();
} else {
return getInstance0(file.getPath());
}
}
/**
* Gets the default KeyTab object.
* @return the KeyTab object, never null.
*/
public static KeyTab getInstance() {
return getInstance(getDefaultTabName());
}
    /** @return true when the keytab file did not exist at load time. */
    public boolean isMissing() {
        return isMissing;
    }
    /** @return false when loading hit an I/O error or a file format error. */
    public boolean isValid() {
        return isValid;
    }
/**
* The location of keytab file will be read from the configuration file
* If it is not specified, consider user.home as the keytab file's
* default location.
* @return never null
*/
private static String getDefaultTabName() {
if (defaultTabName != null) {
return defaultTabName;
} else {
String kname = null;
try {
String keytab_names = Config.getInstance().getDefault
("default_keytab_name", "libdefaults");
if (keytab_names != null) {
StringTokenizer st = new StringTokenizer(keytab_names, " ");
while (st.hasMoreTokens()) {
kname = parse(st.nextToken());
if (new File(kname).exists()) {
break;
}
}
}
} catch (KrbException e) {
kname = null;
}
if (kname == null) {
String user_home =
java.security.AccessController.doPrivileged(
new sun.security.action.GetPropertyAction("user.home"));
if (user_home == null) {
user_home =
java.security.AccessController.doPrivileged(
new sun.security.action.GetPropertyAction("user.dir"));
}
kname = user_home + File.separator + "krb5.keytab";
}
defaultTabName = kname;
return kname;
}
}
/**
* Parses some common keytab name formats
* @param name never null
* @return never null
*/
private static String parse(String name) {
String kname;
if ((name.length() >= 5) &&
(name.substring(0, 5).equalsIgnoreCase("FILE:"))) {
kname = name.substring(5);
} else if ((name.length() >= 9) &&
(name.substring(0, 9).equalsIgnoreCase("ANY:FILE:"))) {
// this format found in MIT's krb5.ini.
kname = name.substring(9);
} else if ((name.length() >= 7) &&
(name.substring(0, 7).equalsIgnoreCase("SRVTAB:"))) {
// this format found in MIT's krb5.ini.
kname = name.substring(7);
} else
kname = name;
return kname;
}
private void load(KeyTabInputStream kis)
throws IOException, RealmException {
entries.clear();
kt_vno = kis.readVersion();
if (kt_vno == KRB5_KT_VNO_1) {
kis.setNativeByteOrder();
}
int entryLength = 0;
KeyTabEntry entry;
while (kis.available() > 0) {
entryLength = kis.readEntryLength();
entry = kis.readEntry(entryLength, kt_vno);
if (DEBUG) {
System.out.println(">>> KeyTab: load() entry length: " +
entryLength + "; type: " +
(entry != null? entry.keyType : 0));
}
if (entry != null)
entries.addElement(entry);
}
}
/**
* Reads all keys for a service from the keytab file that have
* etypes that have been configured for use. If there are multiple
* keys with same etype, the one with the highest kvno is returned.
* @param service the PrincipalName of the requested service
* @return an array containing all the service keys, never null
*/
public EncryptionKey[] readServiceKeys(PrincipalName service) {
KeyTabEntry entry;
EncryptionKey key;
int size = entries.size();
ArrayList<EncryptionKey> keys = new ArrayList<>(size);
for (int i = size-1; i >= 0; i--) {
entry = entries.elementAt(i);
if (entry.service.match(service)) {
if (EType.isSupported(entry.keyType)) {
key = new EncryptionKey(entry.keyblock,
entry.keyType,
new Integer(entry.keyVersion));
keys.add(key);
if (DEBUG) {
System.out.println("Added key: " + entry.keyType +
"version: " + entry.keyVersion);
}
} else if (DEBUG) {
System.out.println("Found unsupported keytype (" +
entry.keyType + ") for " + service);
}
}
}
size = keys.size();
EncryptionKey[] retVal = keys.toArray(new EncryptionKey[size]);
// Sort keys according to default_tkt_enctypes
if (DEBUG) {
System.out.println("Ordering keys wrt default_tkt_enctypes list");
}
final int[] etypes = EType.getDefaults("default_tkt_enctypes");
// Sort the keys, k1 is preferred than k2 if:
// 1. k1's etype appears earlier in etypes than k2's
// 2. If same, k1's KVNO is higher
Arrays.sort(retVal, new Comparator<EncryptionKey>() {
@Override
public int compare(EncryptionKey o1, EncryptionKey o2) {
if (etypes != null) {
int o1EType = o1.getEType();
int o2EType = o2.getEType();
if (o1EType != o2EType) {
for (int i=0; i<etypes.length; i++) {
if (etypes[i] == o1EType) {
return -1;
} else if (etypes[i] == o2EType) {
return 1;
}
}
// Neither o1EType nor o2EType in default_tkt_enctypes,
// therefore won't be used in AS-REQ. We do not care
// about their order, use kvno is OK.
}
}
return o2.getKeyVersionNumber().intValue()
- o1.getKeyVersionNumber().intValue();
}
});
return retVal;
}
/**
* Searches for the service entry in the keytab file.
* The etype of the key must be one that has been configured
* to be used.
* @param service the PrincipalName of the requested service.
* @return true if the entry is found, otherwise, return false.
*/
public boolean findServiceEntry(PrincipalName service) {
KeyTabEntry entry;
for (int i = 0; i < entries.size(); i++) {
entry = entries.elementAt(i);
if (entry.service.match(service)) {
if (EType.isSupported(entry.keyType)) {
return true;
} else if (DEBUG) {
System.out.println("Found unsupported keytype (" +
entry.keyType + ") for " + service);
}
}
}
return false;
}
    /** Returns the file name (path) backing this keytab. */
    public String tabName() {
        return tabName;
    }
/////////////////// THE WRITE SIDE ///////////////////////
/////////////// only used by ktab tool //////////////////
/**
* Adds a new entry in the key table.
* @param service the service which will have a new entry in the key table.
* @param psswd the password which generates the key.
* @param kvno the kvno to use, -1 means automatic increasing
* @param append false if entries with old kvno would be removed.
* Note: if kvno is not -1, entries with the same kvno are always removed
*/
public void addEntry(PrincipalName service, char[] psswd,
int kvno, boolean append) throws KrbException {
EncryptionKey[] encKeys = EncryptionKey.acquireSecretKeys(
psswd, service.getSalt());
// There should be only one maximum KVNO value for all etypes, so that
// all added keys can have the same KVNO.
int maxKvno = 0; // only useful when kvno == -1
for (int i = entries.size()-1; i >= 0; i--) {
KeyTabEntry e = entries.get(i);
if (e.service.match(service)) {
if (e.keyVersion > maxKvno) {
maxKvno = e.keyVersion;
}
if (!append || e.keyVersion == kvno) {
entries.removeElementAt(i);
}
}
}
if (kvno == -1) {
kvno = maxKvno + 1;
}
for (int i = 0; encKeys != null && i < encKeys.length; i++) {
int keyType = encKeys[i].getEType();
byte[] keyValue = encKeys[i].getBytes();
KeyTabEntry newEntry = new KeyTabEntry(service,
service.getRealm(),
new KerberosTime(System.currentTimeMillis()),
kvno, keyType, keyValue);
entries.addElement(newEntry);
}
}
/**
* Gets the list of service entries in key table.
* @return array of <code>KeyTabEntry</code>.
*/
public KeyTabEntry[] getEntries() {
KeyTabEntry[] kentries = new KeyTabEntry[entries.size()];
for (int i = 0; i < kentries.length; i++) {
kentries[i] = entries.elementAt(i);
}
return kentries;
}
/**
* Creates a new default key table.
*/
public synchronized static KeyTab create()
throws IOException, RealmException {
String dname = getDefaultTabName();
return create(dname);
}
    /**
     * Creates a new key table file at the given location, writing only
     * the version header, and returns a <code>KeyTab</code> backed by it.
     * @param name the file name of the key table to create
     */
    public synchronized static KeyTab create(String name)
        throws IOException, RealmException {
        try (KeyTabOutputStream kos =
                new KeyTabOutputStream(new FileOutputStream(name))) {
            kos.writeVersion(KRB5_KT_VNO);
        }
        return new KeyTab(name);
    }
    /**
     * Saves the in-memory entries back to the keytab file, overwriting
     * its previous contents.
     * @throws IOException if the file cannot be written
     */
    public synchronized void save() throws IOException {
        // try-with-resources guarantees the stream is closed even if an
        // entry fails to serialize.
        try (KeyTabOutputStream kos =
                new KeyTabOutputStream(new FileOutputStream(tabName))) {
            kos.writeVersion(kt_vno);
            for (int i = 0; i < entries.size(); i++) {
                kos.writeEntry(entries.elementAt(i));
            }
        }
    }
/**
* Removes entries from the key table.
* @param service the service <code>PrincipalName</code>.
* @param etype the etype to match, remove all if -1
* @param kvno what kvno to remove, -1 for all, -2 for old
* @return the number of entries deleted
*/
public int deleteEntries(PrincipalName service, int etype, int kvno) {
int count = 0;
// Remember the highest KVNO for each etype. Used for kvno == -2
Map<Integer,Integer> highest = new HashMap<>();
for (int i = entries.size()-1; i >= 0; i--) {
KeyTabEntry e = entries.get(i);
if (service.match(e.getService())) {
if (etype == -1 || e.keyType == etype) {
if (kvno == -2) {
// Two rounds for kvno == -2. In the first round (here),
// only find out highest KVNO for each etype
if (highest.containsKey(e.keyType)) {
int n = highest.get(e.keyType);
if (e.keyVersion > n) {
highest.put(e.keyType, e.keyVersion);
}
} else {
highest.put(e.keyType, e.keyVersion);
}
} else if (kvno == -1 || e.keyVersion == kvno) {
entries.removeElementAt(i);
count++;
}
}
}
}
// Second round for kvno == -2, remove old entries
if (kvno == -2) {
for (int i = entries.size()-1; i >= 0; i--) {
KeyTabEntry e = entries.get(i);
if (service.match(e.getService())) {
if (etype == -1 || e.keyType == etype) {
int n = highest.get(e.keyType);
if (e.keyVersion != n) {
entries.removeElementAt(i);
count++;
}
}
}
}
}
return count;
}
    /**
     * Creates key table file version.
     * @param file the key table file.
     * @throws IOException if the version header cannot be written.
     */
    public synchronized void createVersion(File file) throws IOException {
        try (KeyTabOutputStream kos =
                new KeyTabOutputStream(new FileOutputStream(file))) {
            // Write only the 16-bit version number; no entries.
            kos.write16(KRB5_KT_VNO);
        }
    }
}
| |
package edu.mayo.mprc.swift.ui.client.widgets.validation;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Element;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.*;
import edu.mayo.mprc.dbcurator.client.CurationEditor;
import edu.mayo.mprc.dbcurator.client.EditorCloseCallback;
import edu.mayo.mprc.swift.ui.client.dialogs.ErrorDialog;
import edu.mayo.mprc.swift.ui.client.dialogs.PreviewDialog;
import edu.mayo.mprc.swift.ui.client.dialogs.SaveDialog;
import edu.mayo.mprc.swift.ui.client.dialogs.ValidationPanel;
import edu.mayo.mprc.swift.ui.client.rpc.*;
import edu.mayo.mprc.swift.ui.client.service.ServiceAsync;
import edu.mayo.mprc.swift.ui.client.widgets.Callback;
import edu.mayo.mprc.swift.ui.client.widgets.ExistingDOMPanel;
import edu.mayo.mprc.swift.ui.client.widgets.ParamSetSelectionController;
import edu.mayo.mprc.swift.ui.client.widgets.ParamsSelector;
import java.util.*;
/**
 * Allow users to edit parameter sets directly on the swift submit page.
 * <p/>
 * Note that this is no longer really a panel. It requires a number of HTML elements
 * that it inserts itself into:
 * The first row is fixed and has ids:
 * paramsToggle, paramsSelector, globalParamsValidation
 * Subsequent rows labeled with paramRow are cloned and elements
 * filled as parameters are added:
 * paramRow: paramLabel, paramEntry, paramValidation
 */
public final class SimpleParamsEditorPanel implements SourcesChangeEvents {
    public static final String ACTION_LINK = "actionLink";
    public static final String SPACE_AFTER = "spaceAfter";
    public static final String LABEL1 = "paramLabelLeftCol";
    public static final String ENTRY1 = "paramEntryLeftCol";
    public static final String LABEL2 = "paramLabelRightCol";
    public static final String ENTRY2 = "paramEntryRightCol";
    public static final String VALIDATION = "paramValidation";
    public static final String PARAMS_LABEL = "params-label";
    public static final String MODIFICATIONS = "modifications";
    public static final String TOLERANCES = "tolerances";
    public static final String INSTRUMENT = "instrument";
    public static final String SCAFFOLD_SETTINGS = "scaffoldSettings";
    public static final String ENABLED_ENGINES = "enabledEngines";
    public static final String TITLE_SUFFIX = "titleSuffix";
    private ChangeListenerCollection listeners = new ChangeListenerCollection();
    private ParamsSelector selector;
    private ServiceAsync serviceAsync;
    private ValidationController validationController;
    private ParamSetSelectionController selectionController;
    private boolean editorEnabled = false;
    private boolean editorExpanded = false;
    private boolean editorVisible = editorExpanded && editorEnabled;
    private boolean editorErrorMessageVisible = true;
    private ModificationsLabel fixedMods;
    private ModificationsLabel varMods;
    // NOTE(review): never assigned; the constructor uses a local
    // EnabledEnginesEditor instead — confirm whether this field is needed.
    private EnabledEnginesEditor enabledEngines;
    private TitleSuffixTextBox titleSuffix;
    //Current user
    private ClientUser user;
    //User map. Used to map e-mail to user id.
    private Map<String/*email*/, ClientUser> userInfo;
    private PushButton saveButton;
    private PushButton deleteButton;
    // Lists of elements constituting the editor
    private List<Element> editorElements;
    // Typed instead of the former raw ArrayList (avoids unchecked warnings).
    private List<PushButton> buttons = new ArrayList<PushButton>();
    private DatabaseListBox dlb;

    public SimpleParamsEditorPanel(final ServiceAsync serviceAsync, final InitialPageData pageData) {
        this.serviceAsync = serviceAsync;
        userInfo = new HashMap<String, ClientUser>();
        for (final ClientUser clientUser : pageData.listUsers()) {
            userInfo.put(clientUser.getEmail(), clientUser);
        }
        selectionController = new ParamSetSelectionController(serviceAsync);
        validationController = new ValidationController(serviceAsync, selectionController, pageData);
        // Typed instead of the former raw ArrayList (avoids unchecked warnings).
        editorElements = new ArrayList<Element>();
        final HorizontalPanel hp = new HorizontalPanel();
        final RootPanel paramsSelectorPanel = RootPanel.get("paramsSelector");
        paramsSelectorPanel.add(hp);
        selector = new ParamsSelector();
        selectionController.setSelector(selector);
        selectionController.setParamSetList(pageData.getParamSetList());
        hp.add(selector);
        // save buttons //////////////////////////////////////////////////////////
        final PushButton button;
        hp.add(saveButton = new PushButton("Save..."));
        saveButton.addStyleName(ACTION_LINK);
        saveButton.addStyleName(SPACE_AFTER);
        saveButton.addClickHandler(new ClickHandler() {
            @Override
            public void onClick(final ClickEvent event) {
                save();
            }
        });
        saveButton.setVisible(editorVisible);
        buttons.add(saveButton);
        hp.add(button = new PushButton("Preview..."));
        button.addStyleName(ACTION_LINK);
        button.addStyleName(SPACE_AFTER);
        button.addClickHandler(new ClickHandler() {
            @Override
            public void onClick(final ClickEvent event) {
                preview();
            }
        });
        buttons.add(button);
        hp.add(deleteButton = new PushButton("Delete..."));
        deleteButton.addStyleName(ACTION_LINK);
        deleteButton.addStyleName(SPACE_AFTER);
        deleteButton.addClickHandler(new ClickHandler() {
            @Override
            public void onClick(final ClickEvent event) {
                delete();
            }
        });
        buttons.add(deleteButton);
        // description, initially hidden /////////////////////////////////////////////
        final HTMLPanel description;
        paramsSelectorPanel.add(description = new HTMLPanel("<I>This is a description of the ParameterSet.</I>"));
        description.setSize("500px", "50px");
        description.setStyleName("dottedBorder");
        description.setVisible(false);
        /// Existing DOM //////////////////////////////////////////////////////////////
        // Grab the existing DOM for the parameter rows.
        final ExistingDOMPanel edp = new ExistingDOMPanel("paramRow");
        /// database ///////////////////////////////////////////
        {
            final ExistingDOMPanel dbrow = new ExistingDOMPanel("paramDbRow");
            final Label label = new Label("Database:");
            label.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_RIGHT);
            label.setStyleName(PARAMS_LABEL);
            editorElements.add(dbrow.append("database", "paramDbLabel", label, editorVisible));
            final ValidationPanel vp = new ValidationPanel(10);
            dlb = new DatabaseListBox("sequence.database", userInfo);
            final HorizontalPanel p = new HorizontalPanel();
            p.add(dlb);
            //dlb.setStyleName("spaceAfter");
            final PushButton pb = new PushButton("Add or Review Database...");
            pb.addStyleName(ACTION_LINK);
            pb.setTitle("Click here to review the selected database and potentially modify it for your own needs.");
            pb.addClickHandler(new ClickHandler() {
                @Override
                public void onClick(final ClickEvent event) {
                    popupDbCurator();
                }
            });
            validationController.add(dlb, "sequence.database", vp);
            p.add(pb);
            editorElements.add(dbrow.append("database", "paramDbEntry", p, editorVisible));
            editorElements.add(dbrow.append("database", "paramDbValidation", vp, editorVisible));
        }
        /// enzyme /////////////////////////////////////////////
        {
            final Label label = new Label("Protease:");
            label.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_RIGHT);
            label.setStyleName(PARAMS_LABEL);
            editorElements.add(edp.append("enzyme", LABEL1, label, editorVisible));
            final ValidationPanel vp = new ValidationPanel(10);
            final ProteaseListBox tb;
            validationController.add(tb = new ProteaseListBox("sequence.enzyme"), "sequence.enzyme", vp);
            final SemiCheckBox sb;
            validationController.add(sb = new SemiCheckBox(tb), "sequence.min_termini_cleavages", vp);
            editorElements.add(edp.append("enzyme", ENTRY1, tb, editorVisible));
            editorElements.add(edp.append("enzyme", ENTRY1, sb.asWidget(), editorVisible));
            final Label label1 = new Label("Missed Cleavages:");
            label1.setStyleName(PARAMS_LABEL);
            label1.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_RIGHT);
            final ValidatableTextBox tdb = new ValidatableTextBox("sequence.missed_cleavages") {
                @Override
                protected ClientValue getValueFromString(final String value) {
                    if ((value == null) || (value.isEmpty())) {
                        return null;
                    }
                    try {
                        return new ClientInteger(value);
                    } catch (NumberFormatException ignore) {
                        // Report the parse failure through the validation
                        // machinery rather than throwing.
                        final ClientValidationList list = new ClientValidationList();
                        final ClientValidation cv = new ClientValidation("Not a number: " + value,
                                "sequence.missed_cleavages", ClientValidation.SEVERITY_ERROR);
                        list.add(cv);
                        validationController.update("sequence.missed_cleavages", list);
                        return null;
                    }
                }
                @Override
                protected String setValueAsString(final ClientValue object) {
                    return object.toString();
                }
                @Override
                public void setAllowedValues(final List<? extends ClientValue> values) {
                    // unused.
                }
                @Override
                public boolean needsAllowedValues() {
                    return false;
                }
            };
            tdb.setVisibleLength(5);
            editorElements.add(edp.append("enzyme", LABEL2, label1, editorVisible));
            editorElements.add(edp.append("enzyme", ENTRY2, tdb.asWidget(), editorVisible));
            validationController.add(tdb, "sequence.missed_cleavages", vp);
            editorElements.add(edp.append("enzyme", VALIDATION, vp, editorVisible));
        }
        /// modifications /////////////////////////////////////////
        {
            final ValidationPanel vp = new ValidationPanel(10);
            final Label label = new Label("Fixed Modifications:");
            label.setStyleName(PARAMS_LABEL);
            label.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_RIGHT);
            editorElements.add(edp.append(MODIFICATIONS, LABEL1, label, editorVisible));
            // fixed mods label
            fixedMods = new ModificationsLabel(ModificationSelectionEditor.FIXED_PARAM_NAME, ModificationSelectionEditor.FIXED_PARAM_NAME);
            final ModificationSelectionEditor fixedModsEditor = new ModificationSelectionEditor(ModificationSelectionEditor.FIXED_PARAM_NAME, ModificationSelectionEditor.FIXED_MOD_TYPE);
            fixedMods.setEditor(fixedModsEditor);
            validationController.add(fixedMods, ModificationSelectionEditor.FIXED_PARAM_NAME, vp);
            editorElements.add(edp.append(MODIFICATIONS, ENTRY1, fixedMods, editorVisible));
            // variable mods label
            varMods = new ModificationsLabel(ModificationSelectionEditor.VARIABLE_PARAM_NAME, ModificationSelectionEditor.VARIABLE_PARAM_NAME);
            final ModificationSelectionEditor varModsEditor = new ModificationSelectionEditor(ModificationSelectionEditor.VARIABLE_PARAM_NAME, ModificationSelectionEditor.VARIABLE_MOD_TYPE);
            varMods.setEditor(varModsEditor);
            validationController.add(varMods, ModificationSelectionEditor.VARIABLE_PARAM_NAME, vp);
            final Label label1 = new Label("Variable Modifications:");
            label1.setStyleName(PARAMS_LABEL);
            label1.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_RIGHT);
            editorElements.add(edp.append(MODIFICATIONS, LABEL2, label1, editorVisible));
            editorElements.add(edp.append(MODIFICATIONS, ENTRY2, varMods, editorVisible));
            editorElements.add(edp.append(MODIFICATIONS, VALIDATION, vp, editorVisible));
        }
        /// tolerances /////////////////////////////////////////
        {
            final Label label = new Label("Peptide Tolerance:");
            label.setStyleName(PARAMS_LABEL);
            label.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_RIGHT);
            editorElements.add(edp.append(TOLERANCES, LABEL1, label, editorVisible));
            final ValidationPanel vp = new ValidationPanel(10);
            final ToleranceBox peptideTolerance;
            validationController.add(peptideTolerance = new ToleranceBox("tolerance.peptide"), "tolerance.peptide", vp);
            editorElements.add(edp.append(TOLERANCES, ENTRY1, peptideTolerance.asWidget(), editorVisible));
            final Label label1 = new Label("Fragment Tolerance:");
            editorElements.add(edp.append(TOLERANCES, LABEL2, label1, editorVisible));
            label1.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_RIGHT);
            label1.setStyleName(PARAMS_LABEL);
            final ToleranceBox fragmentTolerance;
            validationController.add(fragmentTolerance = new ToleranceBox("tolerance.fragment"), "tolerance.fragment", vp);
            editorElements.add(edp.append(TOLERANCES, ENTRY2, fragmentTolerance.asWidget(), editorVisible));
            editorElements.add(edp.append(TOLERANCES, VALIDATION, vp, editorVisible));
        }
        /// instrument /////////////////////////////////////////
        final ValidationPanel instrumentVp;
        {
            final Label label = new Label("Instrument:");
            editorElements.add(edp.append(INSTRUMENT, LABEL1, label, editorVisible));
            label.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_RIGHT);
            label.setStyleName(PARAMS_LABEL);
            instrumentVp = new ValidationPanel(10);
            final InstrumentListBox lb;
            validationController.add(lb = new InstrumentListBox(INSTRUMENT), INSTRUMENT, instrumentVp);
            editorElements.add(edp.append(INSTRUMENT, ENTRY1, lb, editorVisible));
            editorElements.add(edp.append(INSTRUMENT, VALIDATION, instrumentVp, editorVisible));
        }
        /// spectrum extraction params /////////////////////////////////////////
        {
            final Label label = new Label("Spectrum extraction:");
            editorElements.add(edp.append(INSTRUMENT, LABEL2, label, editorVisible));
            label.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_RIGHT);
            label.setStyleName(PARAMS_LABEL);
            final SpectrumExtractionEditor ed = new SpectrumExtractionEditor(pageData.isExtractMsnEnabled(), pageData.isMsConvertEnabled());
            validationController.add(ed, "extractMsnSettings", instrumentVp);
            editorElements.add(edp.append(INSTRUMENT, ENTRY2, ed, editorVisible));
        }
        /// scaffold params /////////////////////////////////////////
        {
            final ExistingDOMPanel row = new ExistingDOMPanel("scaffoldRow");
            final Label label = new Label("Scaffold:");
            editorElements.add(row.append(SCAFFOLD_SETTINGS, "scaffoldLabel", label, editorVisible));
            label.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_RIGHT);
            label.setStyleName(PARAMS_LABEL);
            final ValidationPanel vp = new ValidationPanel(10);
            final ScaffoldSettingsEditor ed = new ScaffoldSettingsEditor();
            validationController.add(ed, SCAFFOLD_SETTINGS, vp);
            editorElements.add(row.append(SCAFFOLD_SETTINGS, "scaffoldEntry", ed, editorVisible));
            editorElements.add(row.append(SCAFFOLD_SETTINGS, "scaffoldValidation", vp, editorVisible));
        }
        // enabled engines ////////////////////////////////////////////
        {
            final ExistingDOMPanel row = new ExistingDOMPanel("enginesRow");
            final Label label = new Label("Engines:");
            editorElements.add(row.append(ENABLED_ENGINES, "enginesLabel", label, editorVisible));
            label.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_RIGHT);
            label.setStyleName(PARAMS_LABEL);
            final ValidationPanel vp = new ValidationPanel(10);
            final EnabledEnginesEditor ed = new EnabledEnginesEditor(pageData.getSearchEngines());
            validationController.add(ed, ENABLED_ENGINES, vp);
            editorElements.add(row.append(ENABLED_ENGINES, "enginesEntry", ed, editorVisible));
            editorElements.add(row.append(ENABLED_ENGINES, "enginesValidation", vp, editorVisible));
        }
        // title suffix ////////////////////////////////////////////
        {
            final ExistingDOMPanel row = new ExistingDOMPanel("titleSuffixRow");
            final Label label = new Label("Title Suffix:");
            editorElements.add(row.append(TITLE_SUFFIX, "titleSuffixLabel", label, editorVisible));
            label.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_RIGHT);
            label.setStyleName(PARAMS_LABEL);
            final ValidationPanel vp = new ValidationPanel(10);
            titleSuffix = new TitleSuffixTextBox(TITLE_SUFFIX);
            titleSuffix.setVisibleLength(100);
            validationController.add(titleSuffix, TITLE_SUFFIX, vp);
            editorElements.add(row.append(TITLE_SUFFIX, "titleSuffixEntry", titleSuffix.asWidget(), editorVisible));
            editorElements.add(row.append(TITLE_SUFFIX, "titleSuffixValidation", vp, editorVisible));
        }
        validationController.setEnabled(false);
        selectionController.setParamSetList(pageData.getParamSetList());
        if (pageData.loadedSearch() == null) {
            selectionController.setDefaultParameterSet();
        }
        // Keep the action buttons enabled only while the current parameter
        // selections validate cleanly.
        validationController.addValueChangeHandler(new ValueChangeHandler<ClientValue>() {
            @Override
            public void onValueChange(final ValueChangeEvent<ClientValue> event) {
                for (final PushButton button : buttons) {
                    button.setEnabled(isValid());
                }
            }
        });
    }

    /**
     * @param editorEnabled If true, the user can edit the parameters.
     * @param user current user
     */
    public void setEditorEnabled(final boolean editorEnabled, final ClientUser user) {
        this.editorEnabled = editorEnabled;
        this.user = user;
        setEditorVisible(editorExpanded && this.editorEnabled, !this.editorEnabled && editorExpanded);
        setDeleteVisible(editorEnabled);
    }

    private void setDeleteVisible(final boolean editorEnabled) {
        deleteButton.setVisible(editorEnabled);
    }

    public boolean isEditorEnabled() {
        return editorEnabled;
    }

    public void setParamSetList(final ClientParamSetList newList) {
        selectionController.setParamSetList(newList);
    }

    /**
     * @param editorExpanded When set to true, the parameter editor is displayed
     */
    public void setEditorExpanded(final boolean editorExpanded) {
        this.editorExpanded = editorExpanded;
        setEditorVisible(this.editorExpanded && editorEnabled, !editorEnabled && this.editorExpanded);
    }

    public boolean isEditorExpanded() {
        return editorExpanded;
    }

    /**
     * The actual function that does the grunt work of making the editor visible if it is expanded,
     * and hiding it when it is not. Tests whether the editor was visible previously and changes visibility only
     * if there was a change.
     *
     * @param editorVisible Whether the editor should be made visible.
     * @param errorMessageVisible Whether the editor error message should be made visible instead of the editor.
     */
    private void setEditorVisible(final boolean editorVisible, final boolean errorMessageVisible) {
        if (this.editorVisible != editorVisible) {
            saveButton.setVisible(editorVisible && editorEnabled);
            for (final Element e : editorElements) {
                DOM.setStyleAttribute(e, "display", editorVisible ? "" : "none");
            }
            if (editorVisible) {
                // We are becoming visible. We load list of fixed/variable mods so the controls are ready when the user needs them
                if (fixedMods.getAllowedValues() == null || fixedMods.getAllowedValues().isEmpty()) {
                    loadModificationAllowedValues();
                }
            }
        }
        this.editorVisible = editorVisible;
        if (editorErrorMessageVisible != errorMessageVisible) {
            DOM.setStyleAttribute(DOM.getElementById("parameterEditorDisabledMessage"), "display", errorMessageVisible ? "" : "none");
        }
        editorErrorMessageVisible = errorMessageVisible;
    }

    /**
     * We load allowed mods values for both the fixed and variable mod control.
     * Since both of them operate on the same list, we load the data just once and then copy it.
     */
    private void loadModificationAllowedValues() {
        validationController.getAllowedValuesForValidatable(fixedMods, new Callback() {
            @Override
            public void done() {
                varMods.setAllowedValues(fixedMods.getAllowedValues());
            }
        });
    }

    public ClientParamSet getSelectedParamSet() {
        return selectionController.getSelectedParamSet();
    }

    public void setSelectedParamSet(final ClientParamSet paramSet) {
        selectionController.select(paramSet);
    }

    public ValidationController getValidationController() {
        return validationController;
    }

    /**
     * Fires change events whenever the selected ParamSet changes, or when a change in validation state occurs.
     */
    @Override
    public void addChangeListener(final ChangeListener changeListener) {
        listeners.add(changeListener);
    }

    @Override
    public void removeChangeListener(final ChangeListener changeListener) {
        listeners.remove(changeListener);
    }

    public String getTitleSuffix() {
        // NOTE(review): assumes getValue() returns a non-null ClientString;
        // a null value would NPE here — confirm against TitleSuffixTextBox.
        return ((ClientString) titleSuffix.getValue()).getValue();
    }

    /**
     * @return true if it's possible to call flushParamsFiles(), that is, if the current parameter
     * selections validate with no errors.
     */
    public boolean isValid() {
        return validationController.isValid();
    }

    private void save() {
        new SaveDialog(selector.getSelectedParamSet(), serviceAsync, user,
                new SaveDialog.Callback() {
                    @Override
                    public void saveCompleted(final ClientParamSet paramSet) {
                        selectionController.refresh();
                    }
                }
        );
    }

    private void preview() {
        new PreviewDialog(selector.getSelectedParamSet(), serviceAsync);
    }

    private void delete() {
        final ClientParamSet setToDelete = getSelectedParamSet();
        if (selectionController.getClientParamSets().size() <= 1) {
            Window.alert("Cannot delete all parameter sets - at least one must remain.");
        } else {
            if (Window.confirm("Do you really want to delete parameter set " + setToDelete.getName() + "?")) {
                serviceAsync.delete(
                        selector.getSelectedParamSet(), new AsyncCallback<Void>() {
                            @Override
                            public void onFailure(final Throwable throwable) {
                                ErrorDialog.handleGlobalError(throwable);
                            }
                            @Override
                            public void onSuccess(final Void aVoid) {
                                selectionController.refresh();
                            }
                        }
                );
            }
        }
    }

    private void popupDbCurator() {
        final ClientSequenceDatabase csd = (ClientSequenceDatabase) dlb.getSelected();
        final Integer selected = csd.getId();
        final Map<String, String> emailInitialPairs = new TreeMap<String, String>();
        for (final Map.Entry<String, ClientUser> me : userInfo.entrySet()) {
            emailInitialPairs.put(me.getKey(), me.getValue().getInitials());
        }
        final DialogBox dialogBox = new DialogBox(false);
        final CurationEditor ce = new CurationEditor(selected, user.getEmail(), emailInitialPairs, new EditorCloseCallback() {
            @Override
            public void editorClosed(final Integer openCurationID) {
                // Refresh the database list so a newly saved curation shows
                // up, then select it and dismiss the dialog.
                validationController.getAllowedValues(dlb, new Callback() {
                    @Override
                    public void done() {
                        if (openCurationID != null) {
                            dlb.select(openCurationID);
                        }
                        dialogBox.hide();
                    }
                });
            }
        });
        DOM.setElementAttribute(dialogBox.getElement(), "id", "db-curator");
        dialogBox.setStyleName("dbCuratorEmbed");
        dialogBox.setWidget(ce);
        dialogBox.setSize(Window.getClientWidth() * .8 + "px", Window.getClientHeight() * .8 + "px");
        ce.setPixelSize(Math.max((int) (Window.getClientWidth() * .8), 770), (int) (Window.getClientHeight() * .8));
        // LightBox lb = new LightBox(dialogBox);
        // try {
        // lb.show();
        // } catch (Exception ignore) {
        dialogBox.show();
        // }
        dialogBox.center();
    }
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.vulkan;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* Structure specifying a three-dimensional extent.
*
* <h5>See Also</h5>
*
* <p>{@link VkBufferImageCopy}, {@link VkBufferImageCopy2}, {@link VkImageCopy}, {@link VkImageCopy2}, {@link VkImageCreateInfo}, {@link VkImageFormatProperties}, {@link VkImageResolve}, {@link VkImageResolve2}, {@link VkQueueFamilyProperties}, {@link VkSparseImageFormatProperties}, {@link VkSparseImageMemoryBind}</p>
*
* <h3>Layout</h3>
*
* <pre><code>
* struct VkExtent3D {
* uint32_t {@link #width};
* uint32_t {@link #height};
* uint32_t {@link #depth};
* }</code></pre>
*/
public class VkExtent3D extends Struct implements NativeResource {
    /** The struct size in bytes. */
    public static final int SIZEOF;
    /** The struct alignment in bytes. */
    public static final int ALIGNOF;
    /** The struct member offsets. */
    public static final int
        WIDTH,
        HEIGHT,
        DEPTH;
    static {
        // Describe three consecutive 4-byte (uint32_t) members; the layout
        // helper derives total size, alignment and per-member byte offsets.
        Layout layout = __struct(
            __member(4),
            __member(4),
            __member(4)
        );
        SIZEOF = layout.getSize();
        ALIGNOF = layout.getAlignment();
        WIDTH = layout.offsetof(0);
        HEIGHT = layout.offsetof(1);
        DEPTH = layout.offsetof(2);
    }
    /**
     * Creates a {@code VkExtent3D} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public VkExtent3D(ByteBuffer container) {
        // __checkContainer validates the buffer holds at least SIZEOF bytes
        // before the struct wraps its memory address.
        super(memAddress(container), __checkContainer(container, SIZEOF));
    }
    @Override
    public int sizeof() { return SIZEOF; }
    // Getters and setters delegate to the static n-prefixed helpers, which
    // read/write the member directly in native memory at this struct's address.
    /** the width of the extent. */
    @NativeType("uint32_t")
    public int width() { return nwidth(address()); }
    /** the height of the extent. */
    @NativeType("uint32_t")
    public int height() { return nheight(address()); }
    /** the depth of the extent. */
    @NativeType("uint32_t")
    public int depth() { return ndepth(address()); }
    /** Sets the specified value to the {@link #width} field. */
    public VkExtent3D width(@NativeType("uint32_t") int value) { nwidth(address(), value); return this; }
    /** Sets the specified value to the {@link #height} field. */
    public VkExtent3D height(@NativeType("uint32_t") int value) { nheight(address(), value); return this; }
    /** Sets the specified value to the {@link #depth} field. */
    public VkExtent3D depth(@NativeType("uint32_t") int value) { ndepth(address(), value); return this; }
/** Initializes this struct with the specified values. */
public VkExtent3D set(
int width,
int height,
int depth
) {
width(width);
height(height);
depth(depth);
return this;
}
    /**
     * Copies the specified struct data to this struct.
     *
     * @param src the source struct
     *
     * @return this struct
     */
    public VkExtent3D set(VkExtent3D src) {
        // Raw byte-for-byte copy of the source struct's native memory.
        memCopy(src.address(), address(), SIZEOF);
        return this;
    }
    // ----------------------------------- heap/address factories ------------
    /** Returns a new {@code VkExtent3D} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static VkExtent3D malloc() {
        // Uninitialized native allocation; contents are undefined until set.
        return wrap(VkExtent3D.class, nmemAllocChecked(SIZEOF));
    }
    /** Returns a new {@code VkExtent3D} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static VkExtent3D calloc() {
        // Zero-initialized native allocation.
        return wrap(VkExtent3D.class, nmemCallocChecked(1, SIZEOF));
    }
    /** Returns a new {@code VkExtent3D} instance allocated with {@link BufferUtils}. */
    public static VkExtent3D create() {
        // Backed by a garbage-collected NIO buffer; no explicit free needed.
        ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
        return wrap(VkExtent3D.class, memAddress(container), container);
    }
    /** Returns a new {@code VkExtent3D} instance for the specified memory address. */
    public static VkExtent3D create(long address) {
        return wrap(VkExtent3D.class, address);
    }
    /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkExtent3D createSafe(long address) {
        return address == NULL ? null : wrap(VkExtent3D.class, address);
    }
    // Buffer-of-structs factories; capacity counts structs, not bytes.
    /**
     * Returns a new {@link VkExtent3D.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkExtent3D.Buffer malloc(int capacity) {
        // __checkMalloc guards against capacity * SIZEOF overflow.
        return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
    }
    /**
     * Returns a new {@link VkExtent3D.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkExtent3D.Buffer calloc(int capacity) {
        return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
    }
    /**
     * Returns a new {@link VkExtent3D.Buffer} instance allocated with {@link BufferUtils}.
     *
     * @param capacity the buffer capacity
     */
    public static VkExtent3D.Buffer create(int capacity) {
        ByteBuffer container = __create(capacity, SIZEOF);
        return wrap(Buffer.class, memAddress(container), capacity, container);
    }
    /**
     * Create a {@link VkExtent3D.Buffer} instance at the specified memory.
     *
     * @param address the memory address
     * @param capacity the buffer capacity
     */
    public static VkExtent3D.Buffer create(long address, int capacity) {
        return wrap(Buffer.class, address, capacity);
    }
    /** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkExtent3D.Buffer createSafe(long address, int capacity) {
        return address == NULL ? null : wrap(Buffer.class, address, capacity);
    }
// ----------------------------------- deprecated thread-local-stack helpers (delegate to the MemoryStack overloads)

/** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
@Deprecated public static VkExtent3D mallocStack() { return malloc(stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
@Deprecated public static VkExtent3D callocStack() { return calloc(stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
@Deprecated public static VkExtent3D mallocStack(MemoryStack stack) { return malloc(stack); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
@Deprecated public static VkExtent3D callocStack(MemoryStack stack) { return calloc(stack); }
/** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
@Deprecated public static VkExtent3D.Buffer mallocStack(int capacity) { return malloc(capacity, stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
@Deprecated public static VkExtent3D.Buffer callocStack(int capacity) { return calloc(capacity, stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
@Deprecated public static VkExtent3D.Buffer mallocStack(int capacity, MemoryStack stack) { return malloc(capacity, stack); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
@Deprecated public static VkExtent3D.Buffer callocStack(int capacity, MemoryStack stack) { return calloc(capacity, stack); }
/**
 * Returns a new {@code VkExtent3D} instance allocated on the specified {@link MemoryStack}.
 *
 * @param stack the stack from which to allocate
 */
public static VkExtent3D malloc(MemoryStack stack) {
    return wrap(VkExtent3D.class, stack.nmalloc(ALIGNOF, SIZEOF));
}

/**
 * Returns a new {@code VkExtent3D} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
 *
 * @param stack the stack from which to allocate
 */
public static VkExtent3D calloc(MemoryStack stack) {
    return wrap(VkExtent3D.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
}

/**
 * Returns a new {@link VkExtent3D.Buffer} instance allocated on the specified {@link MemoryStack}.
 *
 * @param stack the stack from which to allocate
 * @param capacity the buffer capacity
 */
public static VkExtent3D.Buffer malloc(int capacity, MemoryStack stack) {
    // NOTE(review): capacity * SIZEOF is a plain int multiply, unlike the heap variant which
    // goes through __checkMalloc — could overflow for very large capacities. Confirm upstream intent.
    return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
}

/**
 * Returns a new {@link VkExtent3D.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
 *
 * @param stack the stack from which to allocate
 * @param capacity the buffer capacity
 */
public static VkExtent3D.Buffer calloc(int capacity, MemoryStack stack) {
    return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
}
// ----------------------------------- unsafe (address-based) field accessors; no bounds or null checks

/** Unsafe version of {@link #width}. */
public static int nwidth(long struct) { return UNSAFE.getInt(null, struct + VkExtent3D.WIDTH); }
/** Unsafe version of {@link #height}. */
public static int nheight(long struct) { return UNSAFE.getInt(null, struct + VkExtent3D.HEIGHT); }
/** Unsafe version of {@link #depth}. */
public static int ndepth(long struct) { return UNSAFE.getInt(null, struct + VkExtent3D.DEPTH); }
/** Unsafe version of {@link #width(int) width}. */
public static void nwidth(long struct, int value) { UNSAFE.putInt(null, struct + VkExtent3D.WIDTH, value); }
/** Unsafe version of {@link #height(int) height}. */
public static void nheight(long struct, int value) { UNSAFE.putInt(null, struct + VkExtent3D.HEIGHT, value); }
/** Unsafe version of {@link #depth(int) depth}. */
public static void ndepth(long struct, int value) { UNSAFE.putInt(null, struct + VkExtent3D.DEPTH, value); }

// -----------------------------------
/** An array of {@link VkExtent3D} structs. */
public static class Buffer extends StructBuffer<VkExtent3D, Buffer> implements NativeResource {

    // Element flyweight created at sentinel address -1L; presumably repositioned over each
    // element by StructBuffer during iteration — see LWJGL StructBuffer for the contract.
    private static final VkExtent3D ELEMENT_FACTORY = VkExtent3D.create(-1L);

    /**
     * Creates a new {@code VkExtent3D.Buffer} instance backed by the specified container.
     *
     * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
     * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
     * by {@link VkExtent3D#SIZEOF}, and its mark will be undefined.
     *
     * <p>The created buffer instance holds a strong reference to the container object.</p>
     */
    public Buffer(ByteBuffer container) {
        super(container, container.remaining() / SIZEOF);
    }

    /** Creates a buffer over raw native memory; no backing container is retained. */
    public Buffer(long address, int cap) {
        super(address, null, -1, 0, cap, cap);
    }

    Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
        super(address, container, mark, pos, lim, cap);
    }

    @Override
    protected Buffer self() {
        // Required by StructBuffer for covariant fluent returns.
        return this;
    }

    @Override
    protected VkExtent3D getElementFactory() {
        return ELEMENT_FACTORY;
    }

    /** @return the value of the {@link VkExtent3D#width} field. */
    @NativeType("uint32_t")
    public int width() { return VkExtent3D.nwidth(address()); }
    /** @return the value of the {@link VkExtent3D#height} field. */
    @NativeType("uint32_t")
    public int height() { return VkExtent3D.nheight(address()); }
    /** @return the value of the {@link VkExtent3D#depth} field. */
    @NativeType("uint32_t")
    public int depth() { return VkExtent3D.ndepth(address()); }

    /** Sets the specified value to the {@link VkExtent3D#width} field. */
    public VkExtent3D.Buffer width(@NativeType("uint32_t") int value) { VkExtent3D.nwidth(address(), value); return this; }
    /** Sets the specified value to the {@link VkExtent3D#height} field. */
    public VkExtent3D.Buffer height(@NativeType("uint32_t") int value) { VkExtent3D.nheight(address(), value); return this; }
    /** Sets the specified value to the {@link VkExtent3D#depth} field. */
    public VkExtent3D.Buffer depth(@NativeType("uint32_t") int value) { VkExtent3D.ndepth(address(), value); return this; }
}
}
| |
package org.deeplearning4j.nn.multilayer;
import org.deeplearning4j.datasets.iterator.ExistingDataSetIterator;
import org.deeplearning4j.gradientcheck.GradientCheckUtil;
import org.deeplearning4j.gradientcheck.LossFunctionGradientCheck;
import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.conf.*;
import org.deeplearning4j.nn.conf.distribution.NormalDistribution;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.deeplearning4j.nn.weights.WeightInit;
import org.junit.Test;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.api.buffer.util.DataTypeUtil;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.impl.transforms.Not;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.ILossFunction;
import org.nd4j.linalg.lossfunctions.LossFunctions;
import org.nd4j.linalg.lossfunctions.impl.*;
import java.util.Arrays;
import java.util.Collections;
import static org.junit.Assert.*;
/**
 * Tests for mask-array handling in MultiLayerNetwork / ComputationGraph:
 * mask arrays must be cleared from all layers after fitting, and per-output
 * label masks must make the masked label values irrelevant to both the score
 * and the gradients.
 *
 * Created by Alex on 20/01/2017.
 */
public class TestMasking {

    static {
        // The score/gradient equality assertions below rely on double precision.
        DataTypeUtil.setDTypeForContext(DataBuffer.Type.DOUBLE);
    }

    @Test
    public void checkMaskArrayClearance() {
        for (boolean tbptt : new boolean[] {true, false}) {
            //Simple "does it throw an exception" type test...
            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().iterations(1).seed(12345).list()
                .layer(0, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE)
                    .activation(Activation.IDENTITY).nIn(1).nOut(1).build())
                .backpropType(tbptt ? BackpropType.TruncatedBPTT : BackpropType.Standard)
                .tBPTTForwardLength(8).tBPTTBackwardLength(8).build();
            MultiLayerNetwork net = new MultiLayerNetwork(conf);
            net.init();

            // Single example with both features and labels masks set.
            DataSet data = new DataSet(Nd4j.linspace(1, 10, 10).reshape(1, 1, 10),
                Nd4j.linspace(2, 20, 10).reshape(1, 1, 10), Nd4j.ones(10), Nd4j.ones(10));

            // fit(DataSet): masks must be cleared from every layer afterwards
            net.fit(data);
            for (Layer l : net.getLayers()) {
                assertNull(l.getMaskArray());
            }

            // fit(features, labels, featuresMask, labelsMask): same expectation
            net.fit(data.getFeatures(), data.getLabels(), data.getFeaturesMaskArray(), data.getLabelsMaskArray());
            for (Layer l : net.getLayers()) {
                assertNull(l.getMaskArray());
            }

            // fit(DataSetIterator): same expectation
            DataSetIterator iter = new ExistingDataSetIterator(Collections.singletonList(data).iterator());
            net.fit(iter);
            for (Layer l : net.getLayers()) {
                assertNull(l.getMaskArray());
            }
        }
    }

    @Test
    public void testPerOutputMaskingMLN() {
        //Idea: for per-output masking, the contents of the masked label entries should make zero difference to either
        // the score or the gradients
        int nIn = 6;
        int layerSize = 4;

        // mask1: single-example mask (1 x 5); mask3: minibatch-of-3 mask (3 x 5).
        INDArray mask1 = Nd4j.create(new double[] {1, 0, 0, 1, 0});
        INDArray mask3 = Nd4j.create(new double[][] {{1, 1, 1, 1, 1}, {0, 1, 0, 1, 0}, {1, 0, 0, 1, 1}});
        INDArray[] labelMasks = new INDArray[] {mask1, mask3};

        // NOTE: lossFunctions and act are PARALLEL arrays — act[i] is the output activation
        // paired with lossFunctions[i]. Keep them in sync when editing.
        ILossFunction[] lossFunctions = new ILossFunction[] {new LossBinaryXENT(),
            //                new LossCosineProximity(),    //Doesn't support per-output masking, as it doesn't make sense for cosine proximity
            new LossHinge(), new LossKLD(), new LossKLD(), new LossL1(), new LossL2(), new LossMAE(),
            new LossMAE(), new LossMAPE(), new LossMAPE(),
            //                new LossMCXENT(),             //Per output masking on MCXENT+Softmax: not yet supported
            new LossMCXENT(), new LossMSE(), new LossMSE(), new LossMSLE(), new LossMSLE(),
            new LossNegativeLogLikelihood(), new LossPoisson(), new LossSquaredHinge()};
        Activation[] act = new Activation[] {Activation.SIGMOID, //XENT
            //                Activation.TANH,
            Activation.TANH, //Hinge
            Activation.SIGMOID, //KLD
            Activation.SOFTMAX, //KLD + softmax
            Activation.TANH, //L1
            Activation.TANH, //L2
            Activation.TANH, //MAE
            Activation.SOFTMAX, //MAE + softmax
            Activation.TANH, //MAPE
            Activation.SOFTMAX, //MAPE + softmax
            //                Activation.SOFTMAX, //MCXENT + softmax: see comment above
            Activation.SIGMOID, //MCXENT + sigmoid
            Activation.TANH, //MSE
            Activation.SOFTMAX, //MSE + softmax
            Activation.SIGMOID, //MSLE - needs positive labels/activations (due to log)
            Activation.SOFTMAX, //MSLE + softmax
            Activation.SIGMOID, //NLL
            Activation.SIGMOID, //Poisson
            Activation.TANH //Squared hinge
        };

        for (INDArray labelMask : labelMasks) {
            int minibatch = labelMask.size(0);
            int nOut = labelMask.size(1);

            for (int i = 0; i < lossFunctions.length; i++) {
                ILossFunction lf = lossFunctions[i];
                Activation a = act[i];

                MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().updater(Updater.NONE)
                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1)).seed(12345)
                    .list()
                    .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
                        .build())
                    .layer(1, new OutputLayer.Builder().nIn(layerSize).nOut(nOut).lossFunction(lf)
                        .activation(a).build())
                    .build();

                MultiLayerNetwork net = new MultiLayerNetwork(conf);
                net.init();
                net.setLayerMaskArrays(null, labelMask);

                // Features/labels generated to suit the loss function's valid label range.
                INDArray[] fl = LossFunctionGradientCheck.getFeaturesAndLabels(lf, minibatch, nIn, nOut, 12345);
                INDArray features = fl[0];
                INDArray labels = fl[1];

                net.setInput(features);
                net.setLabels(labels);

                net.computeGradientAndScore();
                double score1 = net.score();
                INDArray grad1 = net.gradient().gradient();

                //Now: change the label values for the masked steps. The masked (mask == 0)
                //entries must make no difference to the score or the gradients.
                INDArray maskZeroLocations = Nd4j.getExecutioner().execAndReturn(new Not(labelMask.dup()));
                INDArray rand = Nd4j.rand(maskZeroLocations.shape()).muli(0.5);
                INDArray newLabels = labels.add(rand.muli(maskZeroLocations)); //Only the masked values are changed
                net.setLabels(newLabels);
                net.computeGradientAndScore();

                assertNotEquals(labels, newLabels);

                double score2 = net.score();
                INDArray grad2 = net.gradient().gradient();

                assertEquals(score1, score2, 1e-6);
                assertEquals(grad1, grad2);

                //Do the same for CompGraph
                ComputationGraphConfiguration conf2 = new NeuralNetConfiguration.Builder().updater(Updater.NONE)
                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1)).seed(12345)
                    .graphBuilder().addInputs("in")
                    .addLayer("0", new DenseLayer.Builder().nIn(nIn).nOut(layerSize)
                        .activation(Activation.TANH).build(), "in")
                    .addLayer("1", new OutputLayer.Builder().nIn(layerSize).nOut(nOut).lossFunction(lf)
                        .activation(a).build(), "0")
                    .setOutputs("1").build();

                ComputationGraph graph = new ComputationGraph(conf2);
                graph.init();
                graph.setLayerMaskArrays(null, new INDArray[] {labelMask});

                graph.setInputs(features);
                graph.setLabels(labels);
                graph.computeGradientAndScore();

                double gScore1 = graph.score();
                INDArray gGrad1 = graph.gradient().gradient();

                graph.setLabels(newLabels);
                graph.computeGradientAndScore();

                double gScore2 = graph.score();
                INDArray gGrad2 = graph.gradient().gradient();

                assertEquals(gScore1, gScore2, 1e-6);
                assertEquals(gGrad1, gGrad2);
            }
        }
    }
}
| |
package org.vertexium.elasticsearch7.lucene;
import org.vertexium.VertexiumException;
import java.util.Locale;
public class EscapeQuerySyntax {
    // Extra characters that must be escaped when they appear as the FIRST character of a term.
    private static final String[] escapableTermExtraFirstChars = {"+", "-", "@"};
    // Characters escaped anywhere within a term.
    private static final String[] escapableTermChars = {"\"", "<", ">", "=",
        "!", "(", ")", "^", "[", "{", ":", "]", "}", "~", "/"};
    // TODO: check what to do with these "*", "?", "\\"
    // Characters escaped inside quoted strings.
    private static final String[] escapableQuotedChars = {"\""};
    // Whitespace characters to escape (includes the ideographic space U+3000).
    private static final String[] escapableWhiteChars = {" ", "\t", "\n", "\r",
        "\f", "\b", "\u3000"};
    // Parser keywords that must be escaped when they appear as a bare term.
    private static final String[] escapableWordTokens = {"AND", "OR", "NOT",
        "TO", "WITHIN", "SENTENCE", "PARAGRAPH", "INORDER"};

    /**
     * Backslash-escapes single characters in {@code str}: every character in
     * {@link #escapableTermChars}, plus a leading character from
     * {@link #escapableTermExtraFirstChars} if present.
     */
    private static CharSequence escapeChar(CharSequence str, Locale locale) {
        if (str == null || str.length() == 0) {
            return str;
        }
        CharSequence buffer = str;
        // regular escapable Char for terms
        for (String escapableTermChar : escapableTermChars) {
            buffer = replaceIgnoreCase(buffer, escapableTermChar.toLowerCase(locale), "\\", locale);
        }
        // First Character of a term as more escaping chars
        for (String escapableTermExtraFirstChar : escapableTermExtraFirstChars) {
            if (buffer.charAt(0) == escapableTermExtraFirstChar.charAt(0)) {
                buffer = "\\" + buffer.charAt(0) + buffer.subSequence(1, buffer.length());
                break;
            }
        }
        return buffer;
    }

    /**
     * Backslash-escapes the characters that are special inside a quoted string.
     *
     * <p>Fix: this previously indexed {@code escapableTermChars} while iterating with the
     * {@code escapableQuotedChars} bound — harmless only by coincidence (both arrays start
     * with {@code "}); now reads the correct array. Also made {@code static} for consistency
     * with the other helpers in this class (it is private, so callers are unaffected).</p>
     */
    private static CharSequence escapeQuoted(CharSequence str, Locale locale) {
        if (str == null || str.length() == 0) {
            return str;
        }
        CharSequence buffer = str;
        for (String escapableQuotedChar : escapableQuotedChars) {
            buffer = replaceIgnoreCase(buffer, escapableQuotedChar.toLowerCase(locale), "\\", locale);
        }
        return buffer;
    }

    /**
     * Escapes a term so it can be safely embedded in a query: escapes special and whitespace
     * characters, and whole parser keywords (AND, OR, NOT, ...).
     *
     * @param term   the term to escape; may be {@code null}
     * @param locale locale used for case-insensitive matching
     * @return the escaped term, or the input unchanged if nothing needed escaping
     */
    public static CharSequence escapeTerm(CharSequence term, Locale locale) {
        if (term == null) {
            return term;
        }
        // Escape single Chars
        term = escapeChar(term, locale);
        term = escapeWhiteChar(term, locale);
        // Escape Parser Words
        for (String escapableWordToken : escapableWordTokens) {
            if (escapableWordToken.equalsIgnoreCase(term.toString())) {
                return "\\" + term;
            }
        }
        return term;
    }

    /**
     * Prefixes every occurrence of {@code sequence1} in {@code string} (matched
     * case-insensitively) with {@code escapeChar}.
     *
     * @param string string to get replaced
     * @param sequence1 the old character sequence in lowercase
     * @param escapeChar the new character to prefix sequence1 in return string.
     * @param locale locale used to lower-case {@code string} for matching
     * @return the new String
     */
    private static CharSequence replaceIgnoreCase(CharSequence string, CharSequence sequence1, CharSequence escapeChar, Locale locale) {
        if (escapeChar == null || sequence1 == null || string == null) {
            throw new NullPointerException();
        }
        int count = string.length();
        int sequence1Length = sequence1.length();
        // empty search-sequence case: surround every character with escapeChar
        if (sequence1Length == 0) {
            StringBuilder result = new StringBuilder((count + 1)
                * escapeChar.length());
            result.append(escapeChar);
            for (int i = 0; i < count; i++) {
                result.append(string.charAt(i));
                result.append(escapeChar);
            }
            return result.toString();
        }
        // normal case: scan the lower-cased string for matches.
        // Hoisted out of the loop — the original recomputed toLowerCase on every iteration.
        String lowercase = string.toString().toLowerCase(locale);
        StringBuilder result = new StringBuilder();
        char first = sequence1.charAt(0);
        int start = 0, copyStart = 0, firstIndex;
        while (start < count) {
            if ((firstIndex = lowercase.indexOf(first, start)) == -1) {
                break;
            }
            boolean found = true;
            if (sequence1.length() > 1) {
                if (firstIndex + sequence1Length > count) {
                    break;
                }
                for (int i = 1; i < sequence1Length; i++) {
                    if (lowercase.charAt(firstIndex + i) != sequence1.charAt(i)) {
                        found = false;
                        break;
                    }
                }
            }
            if (found) {
                // Copy the unmatched prefix, then the escape char, then the matched text
                // in its ORIGINAL case.
                result.append(string.toString(), copyStart, firstIndex);
                result.append(escapeChar);
                result.append(string.toString(), firstIndex, firstIndex + sequence1Length);
                copyStart = start = firstIndex + sequence1Length;
            } else {
                start = firstIndex + 1;
            }
        }
        if (result.length() == 0 && copyStart == 0) {
            // Nothing matched: return the input unchanged (avoids an allocation).
            return string;
        }
        result.append(string.toString().substring(copyStart));
        return result.toString();
    }

    /**
     * Backslash-escapes whitespace characters ({@link #escapableWhiteChars}) in the given
     * string. (The previous javadoc here described parser-token escaping, which actually
     * lives in {@link #escapeTerm}.)
     *
     * @param str string to get replaced
     * @param locale locale to be used when performing string compares
     * @return the new String
     */
    private static CharSequence escapeWhiteChar(CharSequence str, Locale locale) {
        if (str == null || str.length() == 0) {
            return str;
        }
        CharSequence buffer = str;
        for (String escapableWhiteChar : escapableWhiteChars) {
            buffer = replaceIgnoreCase(buffer, escapableWhiteChar.toLowerCase(locale), "\\", locale);
        }
        return buffer;
    }

    /**
     * Returns a String where the escape char has been removed, or kept only once
     * if there was a double escape.
     * <p>
     * Supports escaped unicode characters, e. g. translates <code>&#92;u0041</code> to
     * <code>A</code>.
     *
     * @throws VertexiumException if the input ends in a dangling escape, contains a truncated
     *                            unicode escape, or a non-hex unicode digit
     */
    public static String discardEscapeChar(CharSequence input) {
        // The output can only shrink, so an array the size of the input suffices;
        // 'length' tracks the actual number of output chars.
        char[] output = new char[input.length()];
        int length = 0;
        // Whether the last processed character was an escape character.
        boolean lastCharWasEscapeChar = false;
        // Multiplier for the current unicode digit: 16^3 for the first, then 16^2, ...
        // Non-zero while inside a \\uXXXX sequence.
        int codePointMultiplier = 0;
        // Accumulates the codepoint of the escaped unicode character.
        int codePoint = 0;
        for (int i = 0; i < input.length(); i++) {
            char curChar = input.charAt(i);
            if (codePointMultiplier > 0) {
                codePoint += hexToInt(curChar) * codePointMultiplier;
                codePointMultiplier >>>= 4;
                if (codePointMultiplier == 0) {
                    output[length++] = (char) codePoint;
                    codePoint = 0;
                }
            } else if (lastCharWasEscapeChar) {
                if (curChar == 'u') {
                    // found an escaped unicode character
                    codePointMultiplier = 16 * 16 * 16;
                } else {
                    // this character was escaped
                    output[length] = curChar;
                    length++;
                }
                lastCharWasEscapeChar = false;
            } else {
                if (curChar == '\\') {
                    lastCharWasEscapeChar = true;
                } else {
                    output[length] = curChar;
                    length++;
                }
            }
        }
        if (codePointMultiplier > 0) {
            throw new VertexiumException("INVALID_SYNTAX_ESCAPE_UNICODE_TRUNCATION");
        }
        if (lastCharWasEscapeChar) {
            throw new VertexiumException("INVALID_SYNTAX_ESCAPE_CHARACTER");
        }
        return new String(output, 0, length);
    }

    /**
     * Returns the numeric value of the hexadecimal character.
     *
     * @throws VertexiumException if {@code c} is not a hexadecimal digit
     */
    private static int hexToInt(char c) {
        if ('0' <= c && c <= '9') {
            return c - '0';
        } else if ('a' <= c && c <= 'f') {
            return c - 'a' + 10;
        } else if ('A' <= c && c <= 'F') {
            return c - 'A' + 10;
        } else {
            throw new VertexiumException("INVALID_SYNTAX_ESCAPE_NONE_HEX_UNICODE: " + c);
        }
    }
}
| |
package org.bouncycastle.openssl;
import java.io.IOException;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import org.bouncycastle.asn1.ASN1EncodableVector;
import org.bouncycastle.asn1.ASN1Integer;
import org.bouncycastle.asn1.ASN1ObjectIdentifier;
import org.bouncycastle.asn1.DERSequence;
import org.bouncycastle.asn1.cms.ContentInfo;
import org.bouncycastle.asn1.oiw.OIWObjectIdentifiers;
import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.bouncycastle.asn1.pkcs.PrivateKeyInfo;
import org.bouncycastle.asn1.x509.DSAParameter;
import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo;
import org.bouncycastle.asn1.x9.X9ObjectIdentifiers;
import org.bouncycastle.cert.X509AttributeCertificateHolder;
import org.bouncycastle.cert.X509CRLHolder;
import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.pkcs.PKCS10CertificationRequest;
import org.bouncycastle.util.Strings;
import org.bouncycastle.util.io.pem.PemGenerationException;
import org.bouncycastle.util.io.pem.PemHeader;
import org.bouncycastle.util.io.pem.PemObject;
import org.bouncycastle.util.io.pem.PemObjectGenerator;
/**
* PEM generator for the original set of PEM objects used in Open SSL.
*/
public class MiscPEMGenerator
implements PemObjectGenerator
{
private static final ASN1ObjectIdentifier[] dsaOids =
{
X9ObjectIdentifiers.id_dsa,
OIWObjectIdentifiers.dsaWithSHA1
};
private static final byte[] hexEncodingTable =
{
(byte)'0', (byte)'1', (byte)'2', (byte)'3', (byte)'4', (byte)'5', (byte)'6', (byte)'7',
(byte)'8', (byte)'9', (byte)'A', (byte)'B', (byte)'C', (byte)'D', (byte)'E', (byte)'F'
};
private final Object obj;
private final PEMEncryptor encryptor;
public MiscPEMGenerator(Object o)
{
this.obj = o; // use of this confuses some earlier JDKs.
this.encryptor = null;
}
public MiscPEMGenerator(Object o, PEMEncryptor encryptor)
{
this.obj = o;
this.encryptor = encryptor;
}
private PemObject createPemObject(Object o)
throws IOException
{
String type;
byte[] encoding;
if (o instanceof PemObject)
{
return (PemObject)o;
}
if (o instanceof PemObjectGenerator)
{
return ((PemObjectGenerator)o).generate();
}
if (o instanceof X509CertificateHolder)
{
type = "CERTIFICATE";
encoding = ((X509CertificateHolder)o).getEncoded();
}
else if (o instanceof X509CRLHolder)
{
type = "X509 CRL";
encoding = ((X509CRLHolder)o).getEncoded();
}
else if (o instanceof X509TrustedCertificateBlock)
{
type = "TRUSTED CERTIFICATE";
encoding = ((X509TrustedCertificateBlock)o).getEncoded();
}
else if (o instanceof PrivateKeyInfo)
{
PrivateKeyInfo info = (PrivateKeyInfo)o;
ASN1ObjectIdentifier algOID = info.getPrivateKeyAlgorithm().getAlgorithm();
if (algOID.equals(PKCSObjectIdentifiers.rsaEncryption))
{
type = "RSA PRIVATE KEY";
encoding = info.parsePrivateKey().toASN1Primitive().getEncoded();
}
else if (algOID.equals(dsaOids[0]) || algOID.equals(dsaOids[1]))
{
type = "DSA PRIVATE KEY";
DSAParameter p = DSAParameter.getInstance(info.getPrivateKeyAlgorithm().getParameters());
ASN1EncodableVector v = new ASN1EncodableVector();
v.add(new ASN1Integer(0));
v.add(new ASN1Integer(p.getP()));
v.add(new ASN1Integer(p.getQ()));
v.add(new ASN1Integer(p.getG()));
BigInteger x = ASN1Integer.getInstance(info.parsePrivateKey()).getValue();
BigInteger y = p.getG().modPow(x, p.getP());
v.add(new ASN1Integer(y));
v.add(new ASN1Integer(x));
encoding = new DERSequence(v).getEncoded();
}
else if (algOID.equals(X9ObjectIdentifiers.id_ecPublicKey))
{
type = "EC PRIVATE KEY";
encoding = info.parsePrivateKey().toASN1Primitive().getEncoded();
}
else
{
throw new IOException("Cannot identify private key");
}
}
else if (o instanceof SubjectPublicKeyInfo)
{
type = "PUBLIC KEY";
encoding = ((SubjectPublicKeyInfo)o).getEncoded();
}
else if (o instanceof X509AttributeCertificateHolder)
{
type = "ATTRIBUTE CERTIFICATE";
encoding = ((X509AttributeCertificateHolder)o).getEncoded();
}
else if (o instanceof org.bouncycastle.pkcs.PKCS10CertificationRequest)
{
type = "CERTIFICATE REQUEST";
encoding = ((PKCS10CertificationRequest)o).getEncoded();
}
else if (o instanceof ContentInfo)
{
type = "PKCS7";
encoding = ((ContentInfo)o).getEncoded();
}
else
{
throw new PemGenerationException("unknown object passed - can't encode.");
}
if (encryptor != null)
{
String dekAlgName = Strings.toUpperCase(encryptor.getAlgorithm());
// Note: For backward compatibility
if (dekAlgName.equals("DESEDE"))
{
dekAlgName = "DES-EDE3-CBC";
}
byte[] iv = encryptor.getIV();
byte[] encData = encryptor.encrypt(encoding);
List headers = new ArrayList(2);
headers.add(new PemHeader("Proc-Type", "4,ENCRYPTED"));
headers.add(new PemHeader("DEK-Info", dekAlgName + "," + getHexEncoded(iv)));
return new PemObject(type, headers, encData);
}
return new PemObject(type, encoding);
}
private String getHexEncoded(byte[] bytes)
throws IOException
{
char[] chars = new char[bytes.length * 2];
for (int i = 0; i != bytes.length; i++)
{
int v = bytes[i] & 0xff;
chars[2 * i] = (char)(hexEncodingTable[(v >>> 4)]);
chars[2 * i + 1] = (char)(hexEncodingTable[v & 0xf]);
}
return new String(chars);
}
public PemObject generate()
throws PemGenerationException
{
try
{
return createPemObject(obj);
}
catch (IOException e)
{
throw new PemGenerationException("encoding exception: " + e.getMessage(), e);
}
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
package fixtures.bodyboolean.implementation;
import retrofit2.Retrofit;
import fixtures.bodyboolean.Bools;
import com.google.common.reflect.TypeToken;
import com.microsoft.rest.ServiceCall;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.ServiceResponseBuilder;
import fixtures.bodyboolean.models.ErrorException;
import java.io.IOException;
import okhttp3.ResponseBody;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.Headers;
import retrofit2.http.PUT;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
* An instance of this class provides access to all the operations defined
* in Bools.
*/
public final class BoolsImpl implements Bools {
/** The Retrofit service to perform REST calls. */
private BoolsService service;
/** The service client containing this operation class. */
private AutoRestBoolTestServiceImpl client;

/**
 * Initializes an instance of Bools.
 *
 * @param retrofit the Retrofit instance built from a Retrofit Builder.
 * @param client the instance of the service client containing this operation class.
 */
public BoolsImpl(Retrofit retrofit, AutoRestBoolTestServiceImpl client) {
    // Build the type-safe REST proxy from the annotated BoolsService interface.
    this.service = retrofit.create(BoolsService.class);
    this.client = client;
}
/**
 * The interface defining all the services for Bools to be
 * used by Retrofit to perform the actual REST calls.
 */
interface BoolsService {
    @Headers("Content-Type: application/json; charset=utf-8")
    @GET("bool/true")
    Observable<Response<ResponseBody>> getTrue();

    @Headers("Content-Type: application/json; charset=utf-8")
    @PUT("bool/true")
    Observable<Response<ResponseBody>> putTrue(@Body boolean boolBody);

    @Headers("Content-Type: application/json; charset=utf-8")
    @GET("bool/false")
    Observable<Response<ResponseBody>> getFalse();

    @Headers("Content-Type: application/json; charset=utf-8")
    @PUT("bool/false")
    Observable<Response<ResponseBody>> putFalse(@Body boolean boolBody);

    @Headers("Content-Type: application/json; charset=utf-8")
    @GET("bool/null")
    Observable<Response<ResponseBody>> getNull();

    @Headers("Content-Type: application/json; charset=utf-8")
    @GET("bool/invalid")
    Observable<Response<ResponseBody>> getInvalid();
}
/**
 * Get true Boolean value.
 *
 * @throws ErrorException exception thrown from REST call
 * @throws IOException exception thrown from serialization/deserialization
 * @return the boolean object wrapped in {@link ServiceResponse} if successful.
 */
public ServiceResponse<Boolean> getTrue() throws ErrorException, IOException {
    // Synchronous wrapper: blocks on the single-element observable.
    return getTrueAsync().toBlocking().single();
}

/**
 * Get true Boolean value.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<Boolean> getTrueAsync(final ServiceCallback<Boolean> serviceCallback) {
    return ServiceCall.create(getTrueAsync(), serviceCallback);
}

/**
 * Get true Boolean value.
 *
 * @return the observable to the boolean object
 */
public Observable<ServiceResponse<Boolean>> getTrueAsync() {
    return service.getTrue()
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Boolean>>>() {
            @Override
            public Observable<ServiceResponse<Boolean>> call(Response<ResponseBody> response) {
                // Deserialize the raw HTTP response; surface failures through the observable.
                try {
                    ServiceResponse<Boolean> clientResponse = getTrueDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}

// Maps a raw response to ServiceResponse<Boolean>: 200 -> Boolean body, otherwise ErrorException.
private ServiceResponse<Boolean> getTrueDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
    return new ServiceResponseBuilder<Boolean, ErrorException>(this.client.mapperAdapter())
        .register(200, new TypeToken<Boolean>() { }.getType())
        .registerError(ErrorException.class)
        .build(response);
}
/**
 * Set Boolean value true.
 *
 * @param boolBody the boolean value
 * @throws ErrorException exception thrown from REST call
 * @throws IOException exception thrown from serialization/deserialization
 * @return the {@link ServiceResponse} object if successful.
 */
public ServiceResponse<Void> putTrue(boolean boolBody) throws ErrorException, IOException {
    // Synchronous wrapper: blocks on the single-element observable.
    return putTrueAsync(boolBody).toBlocking().single();
}

/**
 * Set Boolean value true.
 *
 * @param boolBody the boolean value
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<Void> putTrueAsync(boolean boolBody, final ServiceCallback<Void> serviceCallback) {
    return ServiceCall.create(putTrueAsync(boolBody), serviceCallback);
}

/**
 * Set Boolean value true.
 *
 * @param boolBody the boolean value
 * @return the observable to the {@link ServiceResponse} object
 */
public Observable<ServiceResponse<Void>> putTrueAsync(boolean boolBody) {
    return service.putTrue(boolBody)
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
            @Override
            public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                // Deserialize the raw HTTP response; surface failures through the observable.
                try {
                    ServiceResponse<Void> clientResponse = putTrueDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}

// Maps a raw response to ServiceResponse<Void>: 200 -> success, otherwise ErrorException.
private ServiceResponse<Void> putTrueDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
    return new ServiceResponseBuilder<Void, ErrorException>(this.client.mapperAdapter())
        .register(200, new TypeToken<Void>() { }.getType())
        .registerError(ErrorException.class)
        .build(response);
}
/**
 * Get false Boolean value.
 *
 * @throws ErrorException exception thrown from REST call
 * @throws IOException exception thrown from serialization/deserialization
 * @return the boolean object wrapped in {@link ServiceResponse} if successful.
 */
public ServiceResponse<Boolean> getFalse() throws ErrorException, IOException {
    // Synchronous wrapper: blocks on the single-element observable.
    return getFalseAsync().toBlocking().single();
}

/**
 * Get false Boolean value.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<Boolean> getFalseAsync(final ServiceCallback<Boolean> serviceCallback) {
    return ServiceCall.create(getFalseAsync(), serviceCallback);
}

/**
 * Get false Boolean value.
 *
 * @return the observable to the boolean object
 */
public Observable<ServiceResponse<Boolean>> getFalseAsync() {
    return service.getFalse()
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Boolean>>>() {
            @Override
            public Observable<ServiceResponse<Boolean>> call(Response<ResponseBody> response) {
                // Deserialize the raw HTTP response; surface failures through the observable.
                try {
                    ServiceResponse<Boolean> clientResponse = getFalseDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}

// Maps a raw response to ServiceResponse<Boolean>: 200 -> Boolean body, otherwise ErrorException.
private ServiceResponse<Boolean> getFalseDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
    return new ServiceResponseBuilder<Boolean, ErrorException>(this.client.mapperAdapter())
        .register(200, new TypeToken<Boolean>() { }.getType())
        .registerError(ErrorException.class)
        .build(response);
}
/**
* Set Boolean value false.
*
* @param boolBody the boolean value
* @throws ErrorException exception thrown from REST call
* @throws IOException exception thrown from serialization/deserialization
* @return the {@link ServiceResponse} object if successful.
*/
public ServiceResponse<Void> putFalse(boolean boolBody) throws ErrorException, IOException {
return putFalseAsync(boolBody).toBlocking().single();
}
/**
* Set Boolean value false.
*
* @param boolBody the boolean value
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @return the {@link ServiceCall} object
*/
public ServiceCall<Void> putFalseAsync(boolean boolBody, final ServiceCallback<Void> serviceCallback) {
return ServiceCall.create(putFalseAsync(boolBody), serviceCallback);
}
/**
* Set Boolean value false.
*
* @param boolBody the boolean value
* @return the {@link ServiceResponse} object if successful.
*/
public Observable<ServiceResponse<Void>> putFalseAsync(boolean boolBody) {
return service.putFalse(boolBody)
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = putFalseDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
private ServiceResponse<Void> putFalseDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
return new ServiceResponseBuilder<Void, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<Void>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
* Get null Boolean value.
*
* @throws ErrorException exception thrown from REST call
* @throws IOException exception thrown from serialization/deserialization
* @return the boolean object wrapped in {@link ServiceResponse} if successful.
*/
public ServiceResponse<Boolean> getNull() throws ErrorException, IOException {
return getNullAsync().toBlocking().single();
}
/**
* Get null Boolean value.
*
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @return the {@link ServiceCall} object
*/
public ServiceCall<Boolean> getNullAsync(final ServiceCallback<Boolean> serviceCallback) {
return ServiceCall.create(getNullAsync(), serviceCallback);
}
/**
* Get null Boolean value.
*
* @return the observable to the boolean object
*/
public Observable<ServiceResponse<Boolean>> getNullAsync() {
return service.getNull()
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Boolean>>>() {
@Override
public Observable<ServiceResponse<Boolean>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Boolean> clientResponse = getNullDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
private ServiceResponse<Boolean> getNullDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
return new ServiceResponseBuilder<Boolean, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<Boolean>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
* Get invalid Boolean value.
*
* @throws ErrorException exception thrown from REST call
* @throws IOException exception thrown from serialization/deserialization
* @return the boolean object wrapped in {@link ServiceResponse} if successful.
*/
public ServiceResponse<Boolean> getInvalid() throws ErrorException, IOException {
return getInvalidAsync().toBlocking().single();
}
/**
* Get invalid Boolean value.
*
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @return the {@link ServiceCall} object
*/
public ServiceCall<Boolean> getInvalidAsync(final ServiceCallback<Boolean> serviceCallback) {
return ServiceCall.create(getInvalidAsync(), serviceCallback);
}
/**
* Get invalid Boolean value.
*
* @return the observable to the boolean object
*/
public Observable<ServiceResponse<Boolean>> getInvalidAsync() {
return service.getInvalid()
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Boolean>>>() {
@Override
public Observable<ServiceResponse<Boolean>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Boolean> clientResponse = getInvalidDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
private ServiceResponse<Boolean> getInvalidDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
return new ServiceResponseBuilder<Boolean, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<Boolean>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.devutils.inspector;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import org.apache.wicket.Component;
import org.apache.wicket.MarkupContainer;
import org.apache.wicket.Page;
import org.apache.wicket.PageReference;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.markup.html.AjaxFallbackLink;
import org.apache.wicket.ajax.markup.html.form.AjaxFallbackButton;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.core.util.lang.WicketObjects;
import org.apache.wicket.extensions.markup.html.repeater.data.grid.ICellPopulator;
import org.apache.wicket.extensions.markup.html.repeater.data.table.AbstractColumn;
import org.apache.wicket.extensions.markup.html.repeater.data.table.IColumn;
import org.apache.wicket.extensions.markup.html.repeater.data.table.PropertyColumn;
import org.apache.wicket.extensions.markup.html.repeater.tree.AbstractTree;
import org.apache.wicket.extensions.markup.html.repeater.tree.DefaultTableTree;
import org.apache.wicket.extensions.markup.html.repeater.tree.table.TreeColumn;
import org.apache.wicket.extensions.markup.html.repeater.util.SortableTreeProvider;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.debug.PageView;
import org.apache.wicket.markup.html.form.CheckBox;
import org.apache.wicket.markup.html.form.CheckBoxMultipleChoice;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.panel.GenericPanel;
import org.apache.wicket.markup.repeater.Item;
import org.apache.wicket.markup.repeater.OddEvenItem;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.LoadableDetachableModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.PropertyModel;
import org.apache.wicket.util.io.IClusterable;
import org.apache.wicket.util.lang.Bytes;
import org.apache.wicket.util.string.Strings;
/**
* Enhanced {@link PageView} which displays all <code>Component</code>s and <code>Behavior</code>s
* of a <code>Page</code> in a <code>TableTree</code> representation. <code>Component</code>s and
* <code>Behavior</code>s can be shown based on their statefulness status. There are also filtering
* options to choose the information displayed. Useful for debugging.
*
* @author Bertrand Guay-Paquet
*/
public final class EnhancedPageView extends GenericPanel<Page>
{
    private static final long serialVersionUID = 1L;
    // Expansion state of the tree rows; also exposed to the tree as a Set model.
    private ExpandState expandState;
    // When true, stateless components without stateful descendants are hidden.
    private boolean showStatefulAndParentsOnly;
    // When true, behaviors appear as child nodes of the component carrying them.
    private boolean showBehaviors;
    // All columns the table tree can display.
    private List<IColumn<TreeNode, Void>> allColumns;
    // Subset of allColumns currently selected by the user.
    private List<IColumn<TreeNode, Void>> visibleColumns;
    private AbstractTree<TreeNode> componentTree;
    /**
     * Constructor.
     *
     * @param id
     *            See Component
     * @param page
     *            The page to be analyzed
     */
    public EnhancedPageView(String id, Page page)
    {
        this(id, getModelFor(page == null ? null : page.getPageReference()));
    }
    // Wraps a page reference in a detachable model so the page is re-resolved
    // on each request instead of being serialized with this panel.
    private static IModel<Page> getModelFor(final PageReference pageRef)
    {
        return new LoadableDetachableModel<Page>()
        {
            private static final long serialVersionUID = 1L;
            @Override
            protected Page load()
            {
                if (pageRef == null)
                    return null;
                Page page = pageRef.getPage();
                return page;
            }
        };
    }
    /**
     * Constructor.
     *
     * @param id
     *            See Component
     * @param pageModel
     *            The page to be analyzed
     */
    public EnhancedPageView(String id, IModel<Page> pageModel)
    {
        super(id, pageModel);
        expandState = new ExpandState();
        expandState.expandAll();
        showStatefulAndParentsOnly = false;
        showBehaviors = true;
        allColumns = allColumns();
        visibleColumns = new ArrayList<>(allColumns);
        // Name of page
        add(new Label("info", new Model<String>()
        {
            private static final long serialVersionUID = 1L;
            @Override
            public String getObject()
            {
                Page page = getModelObject();
                return page == null ? "[Stateless Page]" : page.toString();
            }
        }));
        // Render duration is published by PageView under RENDER_KEY, if available.
        Model<String> pageRenderDuration = new Model<String>()
        {
            private static final long serialVersionUID = 1L;
            @Override
            public String getObject()
            {
                Page page = getModelObject();
                if (page != null)
                {
                    Long renderTime = page.getMetaData(PageView.RENDER_KEY);
                    if (renderTime != null)
                    {
                        return renderTime.toString();
                    }
                }
                return "n/a";
            }
        };
        add(new Label("pageRenderDuration", pageRenderDuration));
        addTreeControls();
        componentTree = newTree();
        add(componentTree);
    }
    // Builds the full list of available table columns; each overrides toString()
    // so the column-selection checkboxes can render a readable label.
    private List<IColumn<TreeNode, Void>> allColumns()
    {
        List<IColumn<TreeNode, Void>> columns = new ArrayList<>();
        columns.add(new PropertyColumn<TreeNode, Void>(Model.of("Path"), "path")
        {
            private static final long serialVersionUID = 1L;
            @Override
            public String getCssClass()
            {
                return "col_path";
            }
            @Override
            public String toString()
            {
                return getDisplayModel().getObject();
            }
        });
        columns.add(new TreeColumn<TreeNode, Void>(Model.of("Tree"))
        {
            private static final long serialVersionUID = 1L;
            @Override
            public String toString()
            {
                return getDisplayModel().getObject();
            }
        });
        columns.add(new PropertyColumn<TreeNode, Void>(Model.of("Stateless"), "stateless")
        {
            private static final long serialVersionUID = 1L;
            @Override
            public String getCssClass()
            {
                return "col_stateless";
            }
            @Override
            public String toString()
            {
                return getDisplayModel().getObject();
            }
        });
        columns.add(new PropertyColumn<TreeNode, Void>(Model.of("Render time (ms)"), "renderTime")
        {
            private static final long serialVersionUID = 1L;
            @Override
            public String getCssClass()
            {
                return "col_renderTime";
            }
            @Override
            public String toString()
            {
                return getDisplayModel().getObject();
            }
        });
        columns.add(new AbstractColumn<TreeNode, Void>(Model.of("Size"))
        {
            private static final long serialVersionUID = 1L;
            @Override
            public void populateItem(Item<ICellPopulator<TreeNode>> item, String componentId,
                IModel<TreeNode> rowModel)
            {
                // Render the byte count in human-readable form (e.g. "1.2K").
                item.add(new Label(componentId, Bytes.bytes(rowModel.getObject().getSize())
                    .toString()));
            }
            @Override
            public String getCssClass()
            {
                return "col_size";
            }
            @Override
            public String toString()
            {
                return getDisplayModel().getObject();
            }
        });
        columns.add(new PropertyColumn<TreeNode, Void>(Model.of("Type"), "type")
        {
            private static final long serialVersionUID = 1L;
            @Override
            public String toString()
            {
                return getDisplayModel().getObject();
            }
        });
        columns.add(new PropertyColumn<TreeNode, Void>(Model.of("Model Object"), "model")
        {
            private static final long serialVersionUID = 1L;
            @Override
            public String toString()
            {
                return getDisplayModel().getObject();
            }
        });
        return columns;
    }
    // Adds the filter form (statefulness filter, behavior toggle, column picker)
    // and the expand-all / collapse-all links.
    private void addTreeControls()
    {
        Form<Void> form = new Form<>("form");
        add(form);
        // NOTE: markup id "showStateless" is bound to showStatefulAndParentsOnly.
        form.add(new CheckBox("showStateless", new PropertyModel<Boolean>(this,
            "showStatefulAndParentsOnly")));
        form.add(new CheckBox("showBehaviors", new PropertyModel<Boolean>(this, "showBehaviors")));
        form.add(new CheckBoxMultipleChoice<>("visibleColumns",
            new PropertyModel<List<IColumn<TreeNode, Void>>>(this, "visibleColumns"), allColumns).setSuffix(" "));
        form.add(new AjaxFallbackButton("submit", form)
        {
            private static final long serialVersionUID = 1L;
            @Override
            protected void onAfterSubmit(Optional<AjaxRequestTarget> target)
            {
                target.ifPresent(t -> t.add(componentTree));
            }
        });
        add(new AjaxFallbackLink<Void>("expandAll")
        {
            private static final long serialVersionUID = 1L;
            @Override
            public void onClick(Optional<AjaxRequestTarget> targetOptional)
            {
                expandState.expandAll();
                targetOptional.ifPresent(target -> target.add(componentTree));
            }
        });
        add(new AjaxFallbackLink<Void>("collapseAll")
        {
            private static final long serialVersionUID = 1L;
            @Override
            public void onClick(Optional<AjaxRequestTarget> targetOptional)
            {
                expandState.collapseAll();
                targetOptional.ifPresent(target -> target.add(componentTree));
            }
        });
    }
    // Creates the table tree over the page's component hierarchy. The expand
    // state is handed to the tree as a (detachable) Set model.
    private AbstractTree<TreeNode> newTree()
    {
        TreeProvider provider = new TreeProvider();
        IModel<Set<TreeNode>> expandStateModel = new LoadableDetachableModel<Set<TreeNode>>()
        {
            private static final long serialVersionUID = 1L;
            @Override
            protected Set<TreeNode> load()
            {
                return expandState;
            }
        };
        AbstractTree<TreeNode> tree = new DefaultTableTree<TreeNode, Void>("tree", visibleColumns,
            provider, Integer.MAX_VALUE, expandStateModel)
        {
            private static final long serialVersionUID = 1L;
            @Override
            protected Item<TreeNode> newRowItem(String id, int index, IModel<TreeNode> model)
            {
                // Alternate row styling.
                return new OddEvenItem<>(id, index, model);
            }
        };
        tree.setOutputMarkupId(true);
        return tree;
    }
    /**
     * Tree node representing either a <code>Page</code>, a <code>Component</code> or a
     * <code>Behavior</code>
     */
    private static class TreeNode
    {
        // The wrapped Component (possibly a Page) or Behavior.
        public IClusterable node;
        // Parent node; null for the root.
        public TreeNode parent;
        public List<TreeNode> children;
        public TreeNode(IClusterable node, TreeNode parent)
        {
            this.node = node;
            this.parent = parent;
            children = new ArrayList<>();
            if (!(node instanceof Component) && !(node instanceof Behavior))
                throw new IllegalArgumentException("Only accepts Components and Behaviors");
        }
        public boolean hasChildren()
        {
            return !children.isEmpty();
        }
        /**
         * @return list of indexes to navigate from the root of the tree to this node (e.g. the path
         *         to the node).
         */
        public List<Integer> getPathIndexes()
        {
            List<Integer> path = new ArrayList<>();
            TreeNode nextChild = this;
            TreeNode parent;
            // Walk up to the root collecting each node's index in its parent,
            // then reverse so the path reads root-to-leaf.
            while ((parent = nextChild.parent) != null)
            {
                int indexOf = parent.children.indexOf(nextChild);
                if (indexOf < 0)
                    throw new AssertionError("Child not found in parent");
                path.add(indexOf);
                nextChild = parent;
            }
            Collections.reverse(path);
            return path;
        }
        // Component path, or for a behavior a synthetic "<parentPath>:Behavior_<index>".
        public String getPath()
        {
            if (node instanceof Component)
            {
                return ((Component)node).getPath();
            }
            else
            {
                Behavior behavior = (Behavior)node;
                Component parent = (Component)this.parent.node;
                String parentPath = parent.getPath();
                int indexOf = parent.getBehaviors().indexOf(behavior);
                return parentPath + Component.PATH_SEPARATOR + "Behavior_" + indexOf;
            }
        }
        // Render duration recorded under PageView.RENDER_KEY, or "n/a" when absent.
        public String getRenderTime()
        {
            if (node instanceof Component)
            {
                Long renderDuration = ((Component)node).getMetaData(PageView.RENDER_KEY);
                if (renderDuration != null)
                {
                    return renderDuration.toString();
                }
            }
            return "n/a";
        }
        public long getSize()
        {
            if (node instanceof Component)
            {
                long size = ((Component)node).getSizeInBytes();
                return size;
            }
            else
            {
                long size = WicketObjects.sizeof(node);
                return size;
            }
        }
        public String getType()
        {
            // anonymous class? Get the parent's class name
            String type = node.getClass().getName();
            if (type.indexOf("$") > 0)
            {
                type = node.getClass().getSuperclass().getName();
            }
            return type;
        }
        // Component's model object as a string, the exception message if reading
        // it fails, or null for behaviors.
        public String getModel()
        {
            if (node instanceof Component)
            {
                String model;
                try
                {
                    model = ((Component)node).getDefaultModelObjectAsString();
                }
                catch (Exception e)
                {
                    model = e.getMessage();
                }
                return model;
            }
            return null;
        }
        public boolean isStateless()
        {
            if (node instanceof Page)
            {
                return ((Page)node).isPageStateless();
            }
            else if (node instanceof Component)
            {
                return ((Component)node).isStateless();
            }
            else
            {
                Behavior behavior = (Behavior)node;
                Component parent = (Component)this.parent.node;
                return behavior.getStatelessHint(parent);
            }
        }
        @Override
        public String toString()
        {
            if (node instanceof Page)
            {
                // Last component of getType() i.e. almost the same as getClass().getSimpleName();
                String type = getType();
                type = Strings.lastPathComponent(type, '.');
                return type;
            }
            else if (node instanceof Component)
            {
                return ((Component)node).getId();
            }
            else
            {
                // Last component of getType() i.e. almost the same as getClass().getSimpleName();
                String type = getType();
                type = Strings.lastPathComponent(type, '.');
                return type;
            }
        }
    }
    /**
     * TreeNode provider for the page. Provides nodes for the components and behaviors of the
     * analyzed page.
     */
    private class TreeProvider extends SortableTreeProvider<TreeNode, Void>
    {
        private static final long serialVersionUID = 1L;
        private TreeModel componentTreeModel = new TreeModel();
        @Override
        public void detach()
        {
            componentTreeModel.detach();
        }
        @Override
        public Iterator<? extends TreeNode> getRoots()
        {
            // At most one root: the page itself (empty when the page is gone).
            TreeNode tree = componentTreeModel.getObject();
            List<TreeNode> roots;
            if (tree == null)
                roots = Collections.emptyList();
            else
                roots = Arrays.asList(tree);
            return roots.iterator();
        }
        @Override
        public boolean hasChildren(TreeNode node)
        {
            return node.hasChildren();
        }
        @Override
        public Iterator<? extends TreeNode> getChildren(TreeNode node)
        {
            return node.children.iterator();
        }
        @Override
        public IModel<TreeNode> model(TreeNode object)
        {
            return new TreeNodeModel(object);
        }
        /**
         * Model of the page component and behavior tree
         */
        private class TreeModel extends LoadableDetachableModel<TreeNode>
        {
            private static final long serialVersionUID = 1L;
            @Override
            protected TreeNode load()
            {
                Page page = getModelObject();
                if (page == null)
                    return null;
                return buildTree(page, null);
            }
            // Recursively builds the TreeNode tree for a component, applying the
            // showBehaviors / showStatefulAndParentsOnly filters along the way.
            private TreeNode buildTree(Component node, TreeNode parent)
            {
                TreeNode treeNode = new TreeNode(node, parent);
                List<TreeNode> children = treeNode.children;
                // Add its behaviors
                if (showBehaviors)
                {
                    for (Behavior behavior : node.getBehaviors())
                    {
                        if (!showStatefulAndParentsOnly || !behavior.getStatelessHint(node))
                            children.add(new TreeNode(behavior, treeNode));
                    }
                }
                // Add its children
                if (node instanceof MarkupContainer)
                {
                    MarkupContainer container = (MarkupContainer)node;
                    for (Component child : container)
                    {
                        buildTree(child, treeNode);
                    }
                }
                // Sort the children list, putting behaviors first
                Collections.sort(children, new Comparator<TreeNode>()
                {
                    @Override
                    public int compare(TreeNode o1, TreeNode o2)
                    {
                        if (o1.node instanceof Component)
                        {
                            if (o2.node instanceof Component)
                            {
                                return o1.getPath().compareTo((o2).getPath());
                            }
                            else
                            {
                                return 1;
                            }
                        }
                        else
                        {
                            if (o2.node instanceof Component)
                            {
                                return -1;
                            }
                            else
                            {
                                return o1.getPath().compareTo((o2).getPath());
                            }
                        }
                    }
                });
                // Add this node to its parent if
                // -it has children or
                // -it is stateful or
                // -stateless components are visible
                if (parent != null &&
                    (!showStatefulAndParentsOnly || treeNode.hasChildren() || !node.isStateless()))
                {
                    parent.children.add(treeNode);
                }
                return treeNode;
            }
        }
        /**
         * Retrieves a TreeNode based on its path
         */
        private class TreeNodeModel extends LoadableDetachableModel<TreeNode>
        {
            private static final long serialVersionUID = 1L;
            // Root-to-node child indexes; survives detachment of the node itself.
            private List<Integer> path;
            public TreeNodeModel(TreeNode treeNode)
            {
                super(treeNode);
                path = treeNode.getPathIndexes();
            }
            @Override
            protected TreeNode load()
            {
                // Re-navigate from the (freshly rebuilt) root down the stored indexes.
                TreeNode tree = componentTreeModel.getObject();
                TreeNode currentItem = tree;
                for (Integer index : path)
                {
                    currentItem = currentItem.children.get(index);
                }
                return currentItem;
            }
            /**
             * Important! Models must be identifiable by their contained object.
             */
            @Override
            public int hashCode()
            {
                return path.hashCode();
            }
            /**
             * Important! Models must be identifiable by their contained object.
             */
            @Override
            public boolean equals(Object obj)
            {
                if (obj instanceof TreeNodeModel)
                {
                    return ((TreeNodeModel)obj).path.equals(path);
                }
                return false;
            }
        }
    }
    /**
     * Expansion state of the tree's nodes
     */
    private static class ExpandState implements Set<TreeNode>, IClusterable
    {
        private static final long serialVersionUID = 1L;
        // Node paths stored by root-to-node index list, not by node identity.
        private HashSet<List<Integer>> set = new HashSet<>();
        // When reversed is true the set holds COLLAPSED nodes (everything else
        // is expanded); when false it holds EXPANDED nodes. This lets
        // expandAll()/collapseAll() run in O(1) by clearing and flipping.
        private boolean reversed = false;
        public void expandAll()
        {
            set.clear();
            reversed = true;
        }
        public void collapseAll()
        {
            set.clear();
            reversed = false;
        }
        @Override
        public boolean add(TreeNode a_e)
        {
            List<Integer> pathIndexes = a_e.getPathIndexes();
            if (reversed)
            {
                return set.remove(pathIndexes);
            }
            else
            {
                return set.add(pathIndexes);
            }
        }
        @Override
        public boolean remove(Object a_o)
        {
            TreeNode item = (TreeNode)a_o;
            List<Integer> pathIndexes = item.getPathIndexes();
            if (reversed)
            {
                return set.add(pathIndexes);
            }
            else
            {
                return set.remove(pathIndexes);
            }
        }
        @Override
        public boolean contains(Object a_o)
        {
            TreeNode item = (TreeNode)a_o;
            List<Integer> pathIndexes = item.getPathIndexes();
            if (reversed)
            {
                return !set.contains(pathIndexes);
            }
            else
            {
                return set.contains(pathIndexes);
            }
        }
        // The remaining Set operations are not needed by the tree and are
        // deliberately unsupported.
        @Override
        public int size()
        {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean isEmpty()
        {
            throw new UnsupportedOperationException();
        }
        @Override
        public Iterator<TreeNode> iterator()
        {
            throw new UnsupportedOperationException();
        }
        @Override
        public Object[] toArray()
        {
            throw new UnsupportedOperationException();
        }
        @Override
        public <T> T[] toArray(T[] a_a)
        {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean containsAll(Collection<?> a_c)
        {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean addAll(Collection<? extends TreeNode> a_c)
        {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean retainAll(Collection<?> a_c)
        {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean removeAll(Collection<?> a_c)
        {
            throw new UnsupportedOperationException();
        }
        @Override
        public void clear()
        {
            throw new UnsupportedOperationException();
        }
    }
}
| |
/**
* Copyright (c) 2009 Dumontierlab
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.dumontierlab.pdb2rdf;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.zip.GZIPOutputStream;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
import org.apache.jena.riot.Lang;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.log4j.Logger;
import org.xml.sax.InputSource;
import com.dumontierlab.pdb2rdf.dao.VirtuosoDaoFactory;
import com.dumontierlab.pdb2rdf.model.PdbRdfModel;
import com.dumontierlab.pdb2rdf.model.VirtPdbRdfModel;
import com.dumontierlab.pdb2rdf.parser.DetailLevel;
import com.dumontierlab.pdb2rdf.parser.PdbXmlParser;
import com.dumontierlab.pdb2rdf.parser.vocabulary.PdbOwlVocabulary;
import com.dumontierlab.pdb2rdf.parser.vocabulary.uri.Bio2RdfPdbUriPattern;
import com.dumontierlab.pdb2rdf.parser.vocabulary.uri.UriBuilder;
import com.dumontierlab.pdb2rdf.util.ClusterIterator;
import com.dumontierlab.pdb2rdf.util.ConsoleProgressMonitorImpl;
import com.dumontierlab.pdb2rdf.util.DirectoryIterator;
import com.dumontierlab.pdb2rdf.util.FileIterator;
import com.dumontierlab.pdb2rdf.util.InputInterator;
import com.dumontierlab.pdb2rdf.util.Pdb2RdfInputIterator;
import com.dumontierlab.pdb2rdf.util.Pdb2RdfInputIteratorAdapter;
import com.dumontierlab.pdb2rdf.util.PdbsIterator;
import com.dumontierlab.pdb2rdf.util.ProgressMonitor;
import com.dumontierlab.pdb2rdf.util.Statistics;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.RDFWriter;
import com.hp.hpl.jena.rdf.model.RDFWriterF;
import com.hp.hpl.jena.rdf.model.impl.RDFWriterFImpl;
import com.hp.hpl.jena.shared.NoWriterForLangException;
import com.hp.hpl.jena.tdb.TDBFactory;
/**
 * @author Jose Cruz-Toledo
 * @author Alexander De Leon
 */
public class Pdb2Rdf {
static final Logger LOG = Logger.getLogger(Pdb2Rdf.class);
private static final String STATSFILE_NAME = "pdb2rdf-stats.txt";
public static void main(String[] args) {
Options options = createOptions();
CommandLineParser parser = createCliParser();
try {
CommandLine cmd = parser.parse(options, args);
if (cmd.hasOption("help")) {
printUsage();
}
Map<String, Double> stats = null;
if (cmd.hasOption("stats")) {
stats = new HashMap<String, Double>();
}
if (cmd.hasOption("statsFromRDF")) {
generateStatsFromRDF(cmd);
} else if (cmd.hasOption("load")) {
load(cmd, stats);
} else if (cmd.hasOption("ontology")) {
printOntology();
} else {
printRdf(cmd, stats);
}
if (stats != null) {
try {
outputStats(cmd, stats);
} catch (FileNotFoundException e) {
LOG.warn("Unable to write statistics file", e);
}
}
} catch (ParseException e) {
LOG.fatal("Unable understand your command.");
printUsage();
System.exit(1);
}
}
private static void generateStatsFromRDF(CommandLine cmd) {
String dir = cmd.getOptionValue("dir");
if (dir == null) {
LOG.fatal("Need to specify -dir with -statsFromRDF");
System.exit(1);
}
Map<String, Double> stats = new HashMap<String, Double>();
Statistics statistics = new Statistics();
try {
InputInterator input = new DirectoryIterator(new File(dir),
cmd.hasOption("gzip"));
while (input.hasNext()) {
try {
Model model = ModelFactory.createDefaultModel();
model.read(input.next(), "");
statistics.mergeStats(statistics.getStatistics(model),
stats);
} catch (Exception e) {
LOG.warn("Fail to read input file", e);
}
}
outputStats(cmd, stats);
} catch (IOException e) {
LOG.fatal("Unable to read files form " + dir);
System.exit(1);
}
}
private static void updateStats(Map<String, Double> stats, PdbRdfModel model) {
Statistics statsFactory = new Statistics();
try {
statsFactory.mergeStats(statsFactory.getStatistics(model), stats);
} catch (Exception e) {
String id = null;
if (model != null) {
id = model.getPdbId();
}
LOG.error("Unable to count statistics for PDB: " + id, e);
}
}
private static void outputStats(CommandLine cmd, Map<String, Double> stats)
throws FileNotFoundException {
File outputDir = getOutputDirectory(cmd);
File statsFile = null;
if (outputDir != null) {
statsFile = new File(outputDir, STATSFILE_NAME);
} else {
statsFile = new File(STATSFILE_NAME);
}
PrintWriter out = new PrintWriter(statsFile);
try {
for (Map.Entry<String, Double> stat : stats.entrySet()) {
out.println(stat.getKey() + ": " + stat.getValue());
}
out.flush();
} finally {
out.close();
}
}
	/** Writes the PDB OWL ontology to standard output. */
	private static void printOntology() {
		PdbOwlVocabulary.getOntology().write(System.out);
	}
	/**
	 * Convenience overload that runs RDF generation without statistics
	 * collection. Marked unused; kept for API symmetry with the two-argument
	 * variant — TODO confirm no external caller relies on it before removal.
	 */
	@SuppressWarnings("unused")
	private static void printRdf(final CommandLine cmd) {
		printRdf(cmd, null);
	}
private static void printRdf(final CommandLine cmd,
final Map<String, Double> stats) {
final File outDir = getOutputDirectory(cmd);
final RDFWriter writer = getWriter(cmd);
final ProgressMonitor monitor = getProgressMonitor();
Pdb2RdfInputIterator i = processInput(cmd);
final int inputSize = i.size();
final AtomicInteger progressCount = new AtomicInteger();
ExecutorService pool = null;
if (outDir != null) {
pool = getThreadPool(cmd);
} else {
// if output is going to the STDOUT then we need to do process in
// sequential mode.
pool = Executors.newSingleThreadExecutor();
}
final Object lock = new Object();
while (i.hasNext()) {
final InputSource input = i.next();
pool.execute(new Runnable() {
@Override
public void run() {
OutputStream out = System.out;
PdbXmlParser parser = new PdbXmlParser();
PdbRdfModel model = null;
try {
if (cmd.hasOption("detailLevel")) {
try {
DetailLevel detailLevel = Enum.valueOf(
DetailLevel.class,
cmd.getOptionValue("detailLevel"));
model = parser.parse(input, new PdbRdfModel(),
detailLevel);
} catch (IllegalArgumentException e) {
LOG.fatal(
"Invalid argument value for detailLevel option",
e);
System.exit(1);
}
} else {
model = parser.parse(input, new PdbRdfModel());
}
// add the input file information
model.addInputFileInformation();
// add the outputFile information();
model.addRDFFileInformation();
if (outDir != null) {
File directory = new File(outDir, model.getPdbId()
.substring(1, 3));
synchronized (lock) {
if (!directory.exists()) {
directory.mkdir();
}
}
File file = new File(directory, model.getPdbId()
+ ".rdf.gz");
out = new GZIPOutputStream(new FileOutputStream(
file));
}
if (cmd.hasOption("format")) {
if (cmd.getOptionValue("format").equalsIgnoreCase(
"NQUADs")) {
Dataset ds = TDBFactory.createDataset();
ds.addNamedModel(model.getDatasetResource()
.toString(), model);
StringWriter sw = new StringWriter();
RDFDataMgr.write(sw, ds, Lang.NQUADS);
out.write(sw.toString().getBytes(
Charset.forName("UTF-8")));
ds.close();
}
}
writer.write(model, out, null);
if (stats != null) {
updateStats(stats, model);
}
if (monitor != null) {
monitor.setProgress(
progressCount.incrementAndGet(), inputSize);
}
} catch (Exception e) {
String id = null;
if (model != null) {
id = model.getPdbId();
}
LOG.error("Unable to parse input for PDB: " + id, e);
} finally {
try {
out.close();
} catch (IOException e) {
LOG.error("Unable to close output stream", e);
}
}
}
});
}
pool.shutdown();
while (!pool.isTerminated()) {
try {
pool.awaitTermination(1, TimeUnit.SECONDS);
} catch (InterruptedException e) {
break;
}
}
}
private static RDFWriter getWriter(CommandLine cmd) {
RDFWriterF writerFactory = new RDFWriterFImpl();
RDFWriter writer = writerFactory.getWriter("RDF/XML");
if (cmd.hasOption("format")) {
if (!cmd.getOptionValue("format").equalsIgnoreCase("NQUADS")) {
try {
writer = writerFactory.getWriter(cmd
.getOptionValue("format"));
} catch (NoWriterForLangException e) {
System.out.println("Invalid format option selected!");
e.printStackTrace();
System.exit(0);
}
}
}
return writer;
}
private static File getOutputDirectory(CommandLine cmd) {
if (cmd.hasOption("out")) {
File outDir = new File(cmd.getOptionValue("out"));
if (!outDir.isDirectory()) {
LOG.fatal("The out paramater must specify a directory");
System.exit(1);
}
return outDir;
}
return null;
}
    /**
     * Convenience overload of {@link #load(CommandLine, Map)} that runs
     * without statistics collection.
     */
    private static void load(CommandLine cmd) {
        load(cmd, null);
    }
private static void load(CommandLine cmd, final Map<String, Double> stats) {
String username = "dba";
String password = "dba";
String host = "localhost";
int port = 1111;
DetailLevel detailLevel = null;
if (cmd.hasOption("detailLevel")) {
try {
detailLevel = Enum.valueOf(DetailLevel.class,
cmd.getOptionValue("detailLevel"));
} catch (IllegalArgumentException e) {
LOG.fatal("Invalid argument value for detailLevel option", e);
System.exit(1);
}
}
final DetailLevel f_detailLevel = detailLevel;
if (cmd.hasOption("username")) {
username = cmd.getOptionValue("username");
}
if (cmd.hasOption("password")) {
password = cmd.getOptionValue("password");
}
if (cmd.hasOption("host")) {
host = cmd.getOptionValue("host");
}
if (cmd.hasOption("port")) {
try {
port = Integer.parseInt(cmd.getOptionValue("port"));
} catch (NumberFormatException e) {
LOG.fatal("Invalid port number: " + cmd.getOptionValue("port"));
System.exit(1);
}
}
final VirtuosoDaoFactory factory = new VirtuosoDaoFactory(host, port,
username, password);
ExecutorService pool = getThreadPool(cmd);
final ProgressMonitor monitor = getProgressMonitor();
final Pdb2RdfInputIterator i = processInput(cmd);
final int inputSize = i.size();
final AtomicInteger progressCount = new AtomicInteger();
if (monitor != null) {
monitor.setProgress(0, inputSize);
}
while (i.hasNext()) {
final InputSource input = i.next();
pool.execute(new Runnable() {
public void run() {
PdbXmlParser parser = new PdbXmlParser();
UriBuilder uriBuilder = new UriBuilder();
PdbRdfModel model = null;
try {
model = new VirtPdbRdfModel(factory,
Bio2RdfPdbUriPattern.PDB_GRAPH, uriBuilder,
factory.getTripleStoreDao());
if (f_detailLevel != null) {
parser.parse(input, model, f_detailLevel);
} else {
parser.parse(input, model);
}
if (stats != null) {
updateStats(stats, model);
}
if (monitor != null) {
monitor.setProgress(
progressCount.incrementAndGet(), inputSize);
}
} catch (Exception e) {
LOG.error("Uanble to parse input for pdb="
+ (model != null ? model.getPdbId() : "null"),
e);
}
}
});
}
pool.shutdown();
while (!pool.isTerminated()) {
try {
pool.awaitTermination(1, TimeUnit.SECONDS);
} catch (InterruptedException e) {
break;
}
}
}
private static ProgressMonitor getProgressMonitor() {
try {
return new ConsoleProgressMonitorImpl();
} catch (IOException e) {
LOG.warn("Unable to create progress monitor");
return null;
}
}
private static void printUsage() {
HelpFormatter helpFormatter = new HelpFormatter();
helpFormatter.printHelp(
"pdb2rdf [OPTIONS] [[PDB ID 1] [PDB ID 2] ...]",
createOptions());
}
    /**
     * Creates the Apache Commons CLI parser for the pdb2rdf options.
     * NOTE(review): GnuParser is deprecated as of commons-cli 1.3 in favour of
     * DefaultParser -- confirm the commons-cli version before upgrading.
     */
    private static CommandLineParser createCliParser() {
        return new GnuParser();
    }
private static Pdb2RdfInputIterator processInput(CommandLine cmd) {
boolean gzip = cmd.hasOption("gzip");
try {
if (cmd.hasOption("file")) {
File file = new File(cmd.getOptionValue("file"));
if (!file.exists() || !file.canRead()) {
LOG.fatal("Cannot access file: " + file);
System.exit(1);
}
return new Pdb2RdfInputIteratorAdapter(new FileIterator(file,
gzip));
} else if (cmd.hasOption("dir")) {
File dir = new File(cmd.getOptionValue("dir"));
if (!dir.exists() || !dir.canRead() || !dir.canExecute()) {
LOG.fatal("Cannot access directory: " + dir);
System.exit(1);
}
return new Pdb2RdfInputIteratorAdapter(new DirectoryIterator(
dir, gzip));
} else if (cmd.hasOption("cluster")) {
String url = cmd.getOptionValue("cluster");
return new ClusterIterator(url);
} else {
String[] args = cmd.getArgs();
if (args.length == 0) {
LOG.fatal("You need to specified the file option, the dir option, or explicitly list the pdb ids.");
printUsage();
System.exit(1);
}
return new PdbsIterator(args);
}
} catch (Exception e) {
LOG.fatal(e);
System.exit(1);
return null;
}
}
@SuppressWarnings("static-access")
private static Options createOptions() {
Options options = new Options();
options.addOption("help", false, "Print this message");
Option formatOption = OptionBuilder
.withArgName("RDF/XML|N-TRIPLE|N3|NQUADS").hasOptionalArgs(1)
.withDescription("RDF output format (default: RDF/XMl)")
.hasArg(true).create("format");
options.addOption(formatOption);
Option dirOption = OptionBuilder.withArgName("path")
.withDescription("Directory where input files are located")
.hasArg(true).create("dir");
options.addOption(dirOption);
Option clusterOption = OptionBuilder
.withArgName("URL")
.withDescription(
"URL of the cluster head where input will be acquired")
.hasArg(true).create("cluster");
options.addOption(clusterOption);
Option fileOption = OptionBuilder.withArgName("path")
.withDescription("Input file").hasArg(true).create("file");
options.addOption(fileOption);
options.addOption("gzip", false, "Input is given as gzip file(s)");
Option outDirOption = OptionBuilder
.withArgName("path")
.withDescription(
"Directory where output RDF files will be created")
.hasArg(true).create("out");
options.addOption(outDirOption);
options.addOption("ontology", false,
"Prints the ontology for the PDB namespace");
Option threadsOption = OptionBuilder
.withArgName("number")
.withDescription(
"Number of threads (default: number of processing units * 2)")
.hasArg(true).create("threads");
options.addOption(threadsOption);
options.addOption(
"stats",
false,
"Outputs statistics to file pdb2rdf-stats.txt (in output directory, if one is specified, or in the current directory otherwise)");
options.addOption(
"statsFromRDF",
false,
"Generates statistics from RDF files (located in the directory spefied by -dir). The stats are output to the file pdb2rdf-stats.txt (in output directory, if one is specified, or in the current directory otherwise)");
Option noAtomSitesOption = OptionBuilder
.hasArg(true)
.withDescription(
"Specify detail level: COMPLETE | ATOM | RESIDUE | EXPERIMENT | METADATA ")
.create("detailLevel");
options.addOption(noAtomSitesOption);
return options;
}
private static int getNumberOfThreads(CommandLine cmd) {
int numberOfThreads = Runtime.getRuntime().availableProcessors();
if (cmd.hasOption("threads")) {
try {
numberOfThreads = Integer.parseInt(cmd
.getOptionValue("threads"));
} catch (NumberFormatException e) {
LOG.fatal("Invalid number of threads", e);
System.exit(1);
}
}
return numberOfThreads;
}
    /**
     * Creates a fixed-size thread pool that throttles task submission.
     * <p>
     * The pool uses a tiny (capacity 1) work queue. When the queue is full,
     * the RejectedExecutionHandler blocks the submitting thread on
     * {@code monitor} until a running task completes (afterExecute notifies),
     * then resubmits the rejected task. This keeps at most one pending input
     * in memory instead of queueing the entire input set.
     * NOTE(review): if a task finishes between the rejection and the
     * {@code monitor.wait()} call, that notify is missed and the submitter
     * waits for the NEXT completion -- with a steady task stream this only
     * delays submission rather than deadlocking; confirm this is acceptable.
     *
     * @param cmd parsed command line, consulted for the -threads option
     * @return the throttling executor; callers must shut it down
     */
    private static ExecutorService getThreadPool(CommandLine cmd) {
        // twice the number of PU
        final Object monitor = new Object();
        int numberOfThreads = getNumberOfThreads(cmd);
        LOG.info("Using " + numberOfThreads + " threads.");
        ThreadPoolExecutor threadPool = new ThreadPoolExecutor(numberOfThreads,
                numberOfThreads, 10, TimeUnit.MINUTES,
                new ArrayBlockingQueue<Runnable>(1),
                new RejectedExecutionHandler() {
                    @Override
                    public void rejectedExecution(Runnable r,
                            ThreadPoolExecutor executor) {
                        // block the submitter until some task completes,
                        // then try the rejected task again
                        synchronized (monitor) {
                            try {
                                monitor.wait();
                            } catch (InterruptedException e) {
                                Thread.currentThread().interrupt();
                            }
                        }
                        executor.execute(r);
                    }
                }) {
            @Override
            protected void afterExecute(Runnable r, Throwable t) {
                // wake one submitter blocked in rejectedExecution above
                synchronized (monitor) {
                    monitor.notify();
                }
                super.afterExecute(r, t);
            }
        };
        return threadPool;
    }
}
| |
package us.deathmarine.luyten;
import java.awt.Cursor;
import java.awt.Panel;
import java.awt.Rectangle;
import java.awt.event.AdjustmentEvent;
import java.awt.event.AdjustmentListener;
import java.awt.event.InputEvent;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionAdapter;
import java.io.StringWriter;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import javax.swing.JLabel;
import javax.swing.JScrollBar;
import javax.swing.ScrollPaneConstants;
import javax.swing.SwingUtilities;
import javax.swing.event.HyperlinkEvent;
import org.fife.ui.rsyntaxtextarea.LinkGenerator;
import org.fife.ui.rsyntaxtextarea.LinkGeneratorResult;
import org.fife.ui.rsyntaxtextarea.RSyntaxTextArea;
import org.fife.ui.rsyntaxtextarea.SyntaxConstants;
import org.fife.ui.rsyntaxtextarea.Theme;
import org.fife.ui.rtextarea.RTextScrollPane;
import com.strobel.assembler.metadata.MetadataSystem;
import com.strobel.assembler.metadata.TypeDefinition;
import com.strobel.decompiler.DecompilationOptions;
import com.strobel.decompiler.DecompilerSettings;
import com.strobel.decompiler.PlainTextOutput;
import com.strobel.decompiler.languages.Languages;
/**
 * One open tab in the Luyten main window: a syntax-highlighted text area in a
 * scroll pane, plus (for decompiled Java) Ctrl+click navigation links between
 * declarations and references.
 */
public class OpenFile implements SyntaxConstants {

    /** Extensions that are shown as plain text instead of being decompiled. */
    public static final HashSet<String> WELL_KNOWN_TEXT_FILE_EXTENSIONS = new HashSet<>(Arrays.asList(
            ".java", ".xml", ".rss", ".project", ".classpath", ".h", ".sql", ".js", ".php", ".php5",
            ".phtml", ".html", ".htm", ".xhtm", ".xhtml", ".lua", ".bat", ".pl", ".sh", ".css",
            ".json", ".txt", ".rb", ".make", ".mak", ".py", ".properties", ".prop"));

    // navigation links: selection span -> unique link identifier
    private TreeMap<Selection, String> selectionToUniqueStrTreeMap = new TreeMap<>();
    // caches keyed by link identifier; cleared whenever links are rebuilt
    private Map<String, Boolean> isNavigableCache = new ConcurrentHashMap<>();
    private Map<String, String> readableLinksCache = new ConcurrentHashMap<>();

    private volatile boolean isContentValid = false;
    private volatile boolean isNavigationLinksValid = false;
    private volatile boolean isWaitForLinksCursor = false;
    // last known scroll position as a 0..1 fraction; null when unknown
    private volatile Double lastScrollPercent = null;

    private LinkProvider linkProvider;
    private String initialNavigationLink;
    private boolean isFirstTimeRun = true;

    MainWindow mainWindow;
    RTextScrollPane scrollPane;
    Panel image_pane;
    RSyntaxTextArea textArea;
    String name;
    String path;

    // decompiler and type references (not needed for text files)
    private MetadataSystem metadataSystem;
    private DecompilerSettings settings;
    private DecompilationOptions decompilationOptions;
    private TypeDefinition type;

    public OpenFile(String name, String path, Theme theme, MainWindow mainWindow) {
        this.name = name;
        this.path = path;
        this.mainWindow = mainWindow;

        textArea = new RSyntaxTextArea(25, 70);
        textArea.setCaretPosition(0);
        textArea.requestFocusInWindow();
        textArea.setMarkOccurrences(true);
        textArea.setClearWhitespaceLinesEnabled(false);
        textArea.setEditable(false);
        textArea.setAntiAliasingEnabled(true);
        textArea.setCodeFoldingEnabled(true);
        textArea.setSyntaxEditingStyle(getSyntaxStyle(name));

        scrollPane = new RTextScrollPane(textArea, true);
        scrollPane.setIconRowHeaderEnabled(true);
        textArea.setText("");
        theme.apply(textArea);

        scrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS);
        final JScrollBar verticalScrollbar = scrollPane.getVerticalScrollBar();
        if (verticalScrollbar != null) {
            // remember the scroll position as a fraction so it can be restored
            // after the content is replaced by a re-decompilation
            verticalScrollbar.addAdjustmentListener(new AdjustmentListener() {
                @Override
                public void adjustmentValueChanged(AdjustmentEvent e) {
                    String content = textArea.getText();
                    if (content == null || content.length() == 0)
                        return;
                    int scrollValue = verticalScrollbar.getValue() - verticalScrollbar.getMinimum();
                    int scrollMax = verticalScrollbar.getMaximum() - verticalScrollbar.getMinimum();
                    if (scrollMax < 1 || scrollValue < 0 || scrollValue > scrollMax)
                        return;
                    lastScrollPercent = (((double) scrollValue) / ((double) scrollMax));
                }
            });
        }

        textArea.setHyperlinksEnabled(true);
        textArea.setLinkScanningMask(InputEvent.CTRL_DOWN_MASK);
        textArea.setLinkGenerator(new LinkGenerator() {
            @Override
            public LinkGeneratorResult isLinkAtOffset(RSyntaxTextArea textArea, final int offs) {
                final String uniqueStr = getUniqueStrForOffset(offs);
                final Integer selectionFrom = getSelectionFromForOffset(offs);
                if (uniqueStr != null && selectionFrom != null) {
                    return new LinkGeneratorResult() {
                        @Override
                        public HyperlinkEvent execute() {
                            if (isNavigationLinksValid)
                                onNavigationClicked(uniqueStr);
                            return null;
                        }

                        @Override
                        public int getSourceOffset() {
                            if (isNavigationLinksValid)
                                return selectionFrom;
                            return offs;
                        }
                    };
                }
                return null;
            }
        });

        // show the link destination in the status label while Ctrl-hovering
        textArea.addMouseMotionListener(new MouseMotionAdapter() {
            private boolean isLinkLabelPrev = false;
            private String prevLinkText = null;

            @Override
            public synchronized void mouseMoved(MouseEvent e) {
                String linkText = null;
                boolean isLinkLabel = false;
                boolean isCtrlDown = (e.getModifiersEx() & InputEvent.CTRL_DOWN_MASK) != 0;
                if (isCtrlDown) {
                    linkText = createLinkLabel(e);
                    isLinkLabel = linkText != null;
                }
                if (isCtrlDown && isWaitForLinksCursor) {
                    textArea.setCursor(new Cursor(Cursor.WAIT_CURSOR));
                } else if (textArea.getCursor().getType() == Cursor.WAIT_CURSOR) {
                    textArea.setCursor(new Cursor(Cursor.DEFAULT_CURSOR));
                }
                JLabel label = OpenFile.this.mainWindow.getLabel();
                if (isLinkLabel && isLinkLabelPrev) {
                    if (!linkText.equals(prevLinkText)) {
                        setLinkLabel(label, linkText);
                    }
                } else if (isLinkLabel && !isLinkLabelPrev) {
                    setLinkLabel(label, linkText);
                } else if (!isLinkLabel && isLinkLabelPrev) {
                    setLinkLabel(label, null);
                }
                isLinkLabelPrev = isLinkLabel;
                prevLinkText = linkText;
            }

            private void setLinkLabel(JLabel label, String text) {
                // do not overwrite an active navigation status message
                String current = label.getText();
                if (text == null && current != null)
                    if (current.startsWith("Navigating:") || current.startsWith("Cannot navigate:"))
                        return;
                label.setText(text != null ? text : "Complete");
            }

            private String createLinkLabel(MouseEvent e) {
                int offs = textArea.viewToModel(e.getPoint());
                if (isNavigationLinksValid) {
                    return getLinkDescriptionForOffset(offs);
                }
                return null;
            }
        });
    }

    /**
     * Maps a file name to the syntax-highlighting style constant for it.
     * Unknown extensions fall back to the properties-file style.
     * FIX: the original if/else chain checked ".js" twice (the second branch
     * was unreachable) and recomputed name.toLowerCase() for every branch.
     */
    private static String getSyntaxStyle(String fileName) {
        String lower = fileName.toLowerCase();
        if (lower.endsWith(".class") || lower.endsWith(".java"))
            return SYNTAX_STYLE_JAVA;
        if (lower.endsWith(".xml") || lower.endsWith(".rss")
                || lower.endsWith(".project") || lower.endsWith(".classpath"))
            return SYNTAX_STYLE_XML;
        if (lower.endsWith(".h"))
            return SYNTAX_STYLE_C;
        if (lower.endsWith(".sql"))
            return SYNTAX_STYLE_SQL;
        if (lower.endsWith(".js"))
            return SYNTAX_STYLE_JAVASCRIPT;
        if (lower.endsWith(".php") || lower.endsWith(".php5")
                || lower.endsWith(".phtml"))
            return SYNTAX_STYLE_PHP;
        if (lower.endsWith(".html") || lower.endsWith(".htm")
                || lower.endsWith(".xhtm") || lower.endsWith(".xhtml"))
            return SYNTAX_STYLE_HTML;
        if (lower.endsWith(".lua"))
            return SYNTAX_STYLE_LUA;
        if (lower.endsWith(".bat"))
            return SYNTAX_STYLE_WINDOWS_BATCH;
        if (lower.endsWith(".pl"))
            return SYNTAX_STYLE_PERL;
        if (lower.endsWith(".sh"))
            return SYNTAX_STYLE_UNIX_SHELL;
        if (lower.endsWith(".css"))
            return SYNTAX_STYLE_CSS;
        if (lower.endsWith(".json"))
            return SYNTAX_STYLE_JSON;
        if (lower.endsWith(".txt"))
            return SYNTAX_STYLE_NONE;
        if (lower.endsWith(".rb"))
            return SYNTAX_STYLE_RUBY;
        if (lower.endsWith(".make") || lower.endsWith(".mak"))
            return SYNTAX_STYLE_MAKEFILE;
        if (lower.endsWith(".py"))
            return SYNTAX_STYLE_PYTHON;
        return SYNTAX_STYLE_PROPERTIES_FILE;
    }

    /** Replaces the text area content. */
    public void setContent(String content) {
        textArea.setText(content);
    }

    /**
     * Decompiles {@code type} into the text area, with navigation links when
     * the selected language is Java.
     */
    public void decompile() {
        this.invalidateContent();
        // synchronized: do not accept changes from menu while running
        synchronized (settings) {
            if (Languages.java().getName().equals(settings.getLanguage().getName())) {
                decompileWithNavigationLinks();
            } else {
                decompileWithoutLinks();
            }
        }
    }

    /** Plain decompilation without hyperlink support (non-Java languages). */
    private void decompileWithoutLinks() {
        this.invalidateContent();
        isNavigationLinksValid = false;
        textArea.setHyperlinksEnabled(false);

        StringWriter stringwriter = new StringWriter();
        PlainTextOutput plainTextOutput = new PlainTextOutput(stringwriter);
        plainTextOutput.setUnicodeOutputEnabled(decompilationOptions.getSettings().isUnicodeOutputEnabled());
        settings.getLanguage().decompileType(type, plainTextOutput, decompilationOptions);
        setContentPreserveLastScrollPosition(stringwriter.toString());
        this.isContentValid = true;
    }

    /** Java decompilation that also collects definition/reference links. */
    private void decompileWithNavigationLinks() {
        this.invalidateContent();
        DecompilerLinkProvider newLinkProvider = new DecompilerLinkProvider();
        newLinkProvider.setDecompilerReferences(metadataSystem, settings, decompilationOptions);
        newLinkProvider.setType(type);
        linkProvider = newLinkProvider;

        linkProvider.generateContent();
        setContentPreserveLastScrollPosition(linkProvider.getTextContent());
        this.isContentValid = true;
        enableLinks();
    }

    /**
     * Sets the content and, when a previous scroll position is known and no
     * pending navigation target exists, restores that position afterwards.
     */
    private void setContentPreserveLastScrollPosition(final String content) {
        final Double scrollPercent = lastScrollPercent;
        if (scrollPercent != null && initialNavigationLink == null) {
            SwingUtilities.invokeLater(new Runnable() {
                @Override
                public void run() {
                    textArea.setText(content);
                    restoreScrollPosition(scrollPercent);
                }
            });
        } else {
            textArea.setText(content);
        }
    }

    /** Scrolls to the given 0..1 fraction of the document, clamped to range. */
    private void restoreScrollPosition(final double position) {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                JScrollBar verticalScrollbar = scrollPane.getVerticalScrollBar();
                if (verticalScrollbar == null)
                    return;
                int scrollMax = verticalScrollbar.getMaximum() - verticalScrollbar.getMinimum();
                long newScrollValue = Math.round(position * scrollMax) + verticalScrollbar.getMinimum();
                if (newScrollValue < verticalScrollbar.getMinimum())
                    newScrollValue = verticalScrollbar.getMinimum();
                if (newScrollValue > verticalScrollbar.getMaximum())
                    newScrollValue = verticalScrollbar.getMaximum();
                verticalScrollbar.setValue((int) newScrollValue);
            }
        });
    }

    /**
     * Processes navigation links: synchronously when an initial navigation
     * target is pending (the caller needs the links immediately), otherwise on
     * a background thread with a wait cursor.
     */
    private void enableLinks() {
        if (initialNavigationLink != null) {
            doEnableLinks();
        } else {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        isWaitForLinksCursor = true;
                        doEnableLinks();
                    } finally {
                        isWaitForLinksCursor = false;
                        resetCursor();
                    }
                }
            }).start();
        }
    }

    private void resetCursor() {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                textArea.setCursor(new Cursor(Cursor.DEFAULT_CURSOR));
            }
        });
    }

    private void doEnableLinks() {
        isNavigationLinksValid = false;
        linkProvider.processLinks();
        buildSelectionToUniqueStrTreeMap();
        clearLinksCache();
        isNavigationLinksValid = true;
        textArea.setHyperlinksEnabled(true);
        warmUpWithFirstLink();
    }

    /** Resolves one link eagerly so the first Ctrl-hover responds quickly. */
    private void warmUpWithFirstLink() {
        if (selectionToUniqueStrTreeMap.keySet().size() > 0) {
            Selection selection = selectionToUniqueStrTreeMap.keySet().iterator().next();
            getLinkDescriptionForOffset(selection.from);
        }
    }

    public void clearLinksCache() {
        try {
            isNavigableCache.clear();
            readableLinksCache.clear();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Rebuilds the selection-span -> link-id map from the link provider. */
    private void buildSelectionToUniqueStrTreeMap() {
        TreeMap<Selection, String> treeMap = new TreeMap<>();
        Map<String, Selection> definitionToSelectionMap = linkProvider.getDefinitionToSelectionMap();
        Map<String, Set<Selection>> referenceToSelectionsMap = linkProvider.getReferenceToSelectionsMap();

        for (String key : definitionToSelectionMap.keySet()) {
            Selection selection = definitionToSelectionMap.get(key);
            treeMap.put(selection, key);
        }
        for (String key : referenceToSelectionsMap.keySet()) {
            for (Selection selection : referenceToSelectionsMap.get(key)) {
                treeMap.put(selection, key);
            }
        }
        selectionToUniqueStrTreeMap = treeMap;
    }

    /**
     * Returns the selection span containing the given text offset, or null
     * when no link covers it. Uses floorKey on the ordered map to find the
     * nearest candidate span starting at or before the offset.
     */
    private Selection getSelectionForOffset(int offset) {
        if (isNavigationLinksValid) {
            Selection offsetSelection = new Selection(offset, offset);
            Selection floorSelection = selectionToUniqueStrTreeMap.floorKey(offsetSelection);
            if (floorSelection != null && floorSelection.from <= offset && floorSelection.to > offset) {
                return floorSelection;
            }
        }
        return null;
    }

    /** Returns the navigable link id at the offset, or null if none. */
    private String getUniqueStrForOffset(int offset) {
        Selection selection = getSelectionForOffset(offset);
        if (selection != null) {
            String uniqueStr = selectionToUniqueStrTreeMap.get(selection);
            if (this.isLinkNavigable(uniqueStr) && this.getLinkDescription(uniqueStr) != null) {
                return uniqueStr;
            }
        }
        return null;
    }

    private Integer getSelectionFromForOffset(int offset) {
        Selection selection = getSelectionForOffset(offset);
        if (selection != null) {
            return selection.from;
        }
        return null;
    }

    private String getLinkDescriptionForOffset(int offset) {
        String uniqueStr = getUniqueStrForOffset(offset);
        if (uniqueStr != null) {
            String description = this.getLinkDescription(uniqueStr);
            if (description != null) {
                return description;
            }
        }
        return null;
    }

    /** Cached navigability check; failures are treated as not navigable. */
    private boolean isLinkNavigable(String uniqueStr) {
        try {
            Boolean isNavigableCached = isNavigableCache.get(uniqueStr);
            if (isNavigableCached != null)
                return isNavigableCached;
            boolean isNavigable = linkProvider.isLinkNavigable(uniqueStr);
            isNavigableCache.put(uniqueStr, isNavigable);
            return isNavigable;
        } catch (Exception e) {
            e.printStackTrace();
        }
        return false;
    }

    /** Cached human-readable description of a link; null when unavailable. */
    private String getLinkDescription(String uniqueStr) {
        try {
            String descriptionCached = readableLinksCache.get(uniqueStr);
            if (descriptionCached != null)
                return descriptionCached;
            String description = linkProvider.getLinkDescription(uniqueStr);
            if (description != null && description.trim().length() > 0) {
                readableLinksCache.put(uniqueStr, description);
                return description;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Handles a Ctrl+click on a link: jump within this file when the target
     * is defined here, open the target type when it is elsewhere, otherwise
     * report the failure in the status label.
     */
    private void onNavigationClicked(String clickedReferenceUniqueStr) {
        if (isLocallyNavigable(clickedReferenceUniqueStr)) {
            onLocalNavigationRequest(clickedReferenceUniqueStr);
        } else if (linkProvider.isLinkNavigable(clickedReferenceUniqueStr)) {
            onOutboundNavigationRequest(clickedReferenceUniqueStr);
        } else {
            JLabel label = this.mainWindow.getLabel();
            if (label == null)
                return;
            String[] linkParts = clickedReferenceUniqueStr.split("\\|");
            if (linkParts.length <= 1) {
                label.setText("Cannot navigate: " + clickedReferenceUniqueStr);
                return;
            }
            String destinationTypeStr = linkParts[1];
            label.setText("Cannot navigate: " + destinationTypeStr.replaceAll("/", "."));
        }
    }

    private boolean isLocallyNavigable(String uniqueStr) {
        // FIX: direct containsKey instead of keySet().contains
        return linkProvider.getDefinitionToSelectionMap().containsKey(uniqueStr);
    }

    private void onLocalNavigationRequest(String uniqueStr) {
        try {
            Selection selection = linkProvider.getDefinitionToSelectionMap().get(uniqueStr);
            doLocalNavigation(selection);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Selects the given span and scrolls it into view; null selects nothing. */
    private void doLocalNavigation(Selection selection) {
        try {
            textArea.requestFocusInWindow();
            if (selection != null) {
                textArea.setSelectionStart(selection.from);
                textArea.setSelectionEnd(selection.to);
                scrollToSelection(selection.from);
            } else {
                textArea.setSelectionStart(0);
                textArea.setSelectionEnd(0);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Scrolls so the selection sits about 29% from the top of the viewport,
     * but only when it is currently near the viewport edges or outside it.
     */
    private void scrollToSelection(final int selectionBeginningOffset) {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                try {
                    int fullHeight = textArea.getBounds().height;
                    int viewportHeight = textArea.getVisibleRect().height;
                    int viewportLineCount = viewportHeight / textArea.getLineHeight();
                    int selectionLineNum = textArea.getLineOfOffset(selectionBeginningOffset);
                    int upperMarginToScroll = Math.round(viewportLineCount * 0.29f);
                    int upperLineToSet = selectionLineNum - upperMarginToScroll;
                    int currentUpperLine = textArea.getVisibleRect().y / textArea.getLineHeight();

                    if (selectionLineNum <= currentUpperLine + 2 ||
                            selectionLineNum >= currentUpperLine + viewportLineCount - 4) {
                        Rectangle rectToScroll = new Rectangle();
                        rectToScroll.x = 0;
                        rectToScroll.width = 1;
                        rectToScroll.y = Math.max(upperLineToSet * textArea.getLineHeight(), 0);
                        rectToScroll.height = Math.min(viewportHeight, fullHeight - rectToScroll.y);
                        textArea.scrollRectToVisible(rectToScroll);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
    }

    private void onOutboundNavigationRequest(String uniqueStr) {
        mainWindow.onNavigationRequest(uniqueStr);
    }

    public void setDecompilerReferences(MetadataSystem metadataSystem,
            DecompilerSettings settings,
            DecompilationOptions decompilationOptions) {
        this.metadataSystem = metadataSystem;
        this.settings = settings;
        this.decompilationOptions = decompilationOptions;
    }

    public TypeDefinition getType() {
        return type;
    }

    public void setType(TypeDefinition type) {
        this.type = type;
    }

    public boolean isContentValid() {
        return isContentValid;
    }

    /** Clears the displayed text and marks content and links stale. */
    public void invalidateContent() {
        try {
            this.setContent("");
        } finally {
            this.isContentValid = false;
            this.isNavigationLinksValid = false;
        }
    }

    public void resetScrollPosition() {
        lastScrollPercent = null;
    }

    public void setInitialNavigationLink(String initialNavigationLink) {
        this.initialNavigationLink = initialNavigationLink;
    }

    /**
     * Jumps to the pending navigation target when this tab becomes visible;
     * on the very first showing it performs a no-op navigation to warm up
     * scrolling. The pending target is consumed either way.
     */
    public void onAddedToScreen() {
        try {
            if (initialNavigationLink != null) {
                onLocalNavigationRequest(initialNavigationLink);
            } else if (isFirstTimeRun) {
                // warm up scrolling
                isFirstTimeRun = false;
                doLocalNavigation(new Selection(0, 0));
            }
        } finally {
            initialNavigationLink = null;
        }
    }

    /**
     * sun.swing.CachedPainter holds on OpenFile for a while
     * even after JTabbedPane.remove(component)
     */
    public void close() {
        linkProvider = null;
        type = null;
        invalidateContent();
        clearLinksCache();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((name == null) ? 0 : name.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OpenFile other = (OpenFile) obj;
        if (name == null) {
            if (other.name != null)
                return false;
        } else if (!name.equals(other.name))
            return false;
        return true;
    }
}
| |
package org.apache.helix.controller.stages;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.stream.Collectors;
import org.apache.helix.HelixException;
import org.apache.helix.HelixManager;
import org.apache.helix.HelixRebalanceException;
import org.apache.helix.controller.LogUtil;
import org.apache.helix.controller.dataproviders.ResourceControllerDataProvider;
import org.apache.helix.controller.pipeline.AbstractBaseStage;
import org.apache.helix.controller.pipeline.StageException;
import org.apache.helix.controller.rebalancer.AutoRebalancer;
import org.apache.helix.controller.rebalancer.CustomRebalancer;
import org.apache.helix.controller.rebalancer.MaintenanceRebalancer;
import org.apache.helix.controller.rebalancer.Rebalancer;
import org.apache.helix.controller.rebalancer.SemiAutoRebalancer;
import org.apache.helix.controller.rebalancer.internal.MappingCalculator;
import org.apache.helix.controller.rebalancer.waged.WagedRebalancer;
import org.apache.helix.model.ClusterConfig;
import org.apache.helix.model.IdealState;
import org.apache.helix.model.InstanceConfig;
import org.apache.helix.model.MaintenanceSignal;
import org.apache.helix.model.Partition;
import org.apache.helix.model.Resource;
import org.apache.helix.model.ResourceAssignment;
import org.apache.helix.model.StateModelDefinition;
import org.apache.helix.monitoring.mbeans.ClusterStatusMonitor;
import org.apache.helix.monitoring.mbeans.ResourceMonitor;
import org.apache.helix.task.TaskConstants;
import org.apache.helix.util.HelixUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Controller pipeline stage that, for each partition, computes the best possible
 * (instance, state) pair based on the IdealState, the StateModelDefinition and the
 * currently live instances.
 */
public class BestPossibleStateCalcStage extends AbstractBaseStage {
  private static final Logger logger =
      LoggerFactory.getLogger(BestPossibleStateCalcStage.class.getName());

  /**
   * Computes the best possible assignment for all resources to rebalance and attaches the
   * result to the event as BEST_POSSIBLE_STATE. Per-instance resource status metrics are
   * reported asynchronously on the cache's async task thread pool so they do not block the
   * pipeline.
   *
   * @param event cluster event; must carry CURRENT_STATE, RESOURCES_TO_REBALANCE and the
   *          ResourceControllerDataProvider cache
   * @throws StageException if any of the required event attributes is missing
   */
  @Override
  public void process(ClusterEvent event) throws Exception {
    _eventId = event.getEventId();
    CurrentStateOutput currentStateOutput = event.getAttribute(AttributeName.CURRENT_STATE.name());
    final Map<String, Resource> resourceMap =
        event.getAttribute(AttributeName.RESOURCES_TO_REBALANCE.name());
    final ClusterStatusMonitor clusterStatusMonitor =
        event.getAttribute(AttributeName.clusterStatusMonitor.name());
    ResourceControllerDataProvider cache =
        event.getAttribute(AttributeName.ControllerDataProvider.name());

    if (currentStateOutput == null || resourceMap == null || cache == null) {
      throw new StageException(
          "Missing attributes in event:" + event + ". Requires CURRENT_STATE|RESOURCES|DataCache");
    }

    final BestPossibleStateOutput bestPossibleStateOutput =
        compute(event, resourceMap, currentStateOutput);
    event.addAttribute(AttributeName.BEST_POSSIBLE_STATE.name(), bestPossibleStateOutput);

    // Capture the maps before going async so the callable works on a consistent snapshot.
    final Map<String, InstanceConfig> instanceConfigMap = cache.getInstanceConfigMap();
    final Map<String, StateModelDefinition> stateModelDefMap = cache.getStateModelDefMap();
    // Metrics update is best-effort: failures are logged, never propagated to the pipeline.
    asyncExecute(cache.getAsyncTasksThreadPool(), new Callable<Object>() {
      @Override
      public Object call() {
        try {
          if (clusterStatusMonitor != null) {
            clusterStatusMonitor
                .setPerInstanceResourceStatus(bestPossibleStateOutput, instanceConfigMap,
                    resourceMap, stateModelDefMap);
          }
        } catch (Exception e) {
          LogUtil.logError(logger, _eventId, "Could not update cluster status metrics!", e);
        }
        return null;
      }
    });
  }

  /**
   * Computes the best possible state for every resource in {@code resourceMap}.
   * Resources configured for the WAGED rebalancer are calculated globally first; any
   * remaining resources fall back to the per-resource legacy rebalancer path. Failures are
   * collected and reported (asynchronously) to the cluster status monitor.
   *
   * @param event the cluster event being processed
   * @param resourceMap all resources to rebalance
   * @param currentStateOutput current states as read from the cluster
   * @return the computed best possible state output (may contain partial results on failure)
   */
  private BestPossibleStateOutput compute(ClusterEvent event, Map<String, Resource> resourceMap,
      CurrentStateOutput currentStateOutput) {
    ResourceControllerDataProvider cache =
        event.getAttribute(AttributeName.ControllerDataProvider.name());
    BestPossibleStateOutput output = new BestPossibleStateOutput();

    HelixManager helixManager = event.getAttribute(AttributeName.helixmanager.name());
    ClusterStatusMonitor clusterStatusMonitor =
        event.getAttribute(AttributeName.clusterStatusMonitor.name());
    WagedRebalancer wagedRebalancer = event.getAttribute(AttributeName.STATEFUL_REBALANCER.name());

    // Check whether the offline/disabled instance count in the cluster reaches the set limit,
    // if yes, pause the rebalancer.
    boolean isValid =
        validateOfflineInstancesLimit(cache, event.getAttribute(AttributeName.helixmanager.name()));

    final List<String> failureResources = new ArrayList<>();
    // WAGED-eligible resources are computed in one global pass.
    Map<String, Resource> calculatedResourceMap =
        computeResourceBestPossibleStateWithWagedRebalancer(wagedRebalancer, cache,
            currentStateOutput, resourceMap, output, failureResources);

    Map<String, Resource> remainingResourceMap = new HashMap<>(resourceMap);
    remainingResourceMap.keySet().removeAll(calculatedResourceMap.keySet());

    // Fallback to the original single resource rebalancer calculation.
    // This is required because we support mixed cluster that uses both WAGED rebalancer and the
    // older rebalancers.
    Iterator<Resource> itr = remainingResourceMap.values().iterator();
    while (itr.hasNext()) {
      Resource resource = itr.next();
      boolean result = false;
      try {
        result = computeSingleResourceBestPossibleState(event, cache, currentStateOutput, resource,
            output);
      } catch (HelixException ex) {
        // A single failing resource must not abort the calculation of the remaining ones.
        LogUtil.logError(logger, _eventId, String
            .format("Exception when calculating best possible states for %s",
                resource.getResourceName()), ex);
      }
      if (!result) {
        failureResources.add(resource.getResourceName());
        LogUtil.logWarn(logger, _eventId, String
            .format("Failed to calculate best possible states for %s", resource.getResourceName()));
      }
    }

    // Check and report if resource rebalance has failure
    updateRebalanceStatus(!isValid || !failureResources.isEmpty(), failureResources, helixManager,
        cache, clusterStatusMonitor, String
            .format("Failed to calculate best possible states for %d resources.",
                failureResources.size()));

    return output;
  }

  /**
   * Asynchronously reports the rebalance outcome to the cluster status monitor: sets the
   * failure gauge and marks the failed resources with BEST_POSSIBLE_STATE_CAL_FAILED.
   * Monitoring failures are logged and swallowed.
   *
   * @param hasFailure whether any resource failed to rebalance in this pass
   * @param failedResources names of the resources that failed
   * @param helixManager manager handle (currently only used by the disabled status-update path)
   * @param cache data cache supplying the async task thread pool
   * @param clusterStatusMonitor monitor to update; may be null (then only logging happens)
   * @param errorMessage message to log when {@code hasFailure} is true
   */
  private void updateRebalanceStatus(final boolean hasFailure, final List<String> failedResources,
      final HelixManager helixManager, final ResourceControllerDataProvider cache,
      final ClusterStatusMonitor clusterStatusMonitor, final String errorMessage) {
    asyncExecute(cache.getAsyncTasksThreadPool(), new Callable<Object>() {
      @Override
      public Object call() {
        try {
          if (hasFailure) {
            /* TODO Enable this update when we resolve ZK server load issue. This will cause extra write to ZK.
            if (_statusUpdateUtil != null) {
              _statusUpdateUtil
                  .logError(StatusUpdateUtil.ErrorType.RebalanceResourceFailure, this.getClass(),
                      errorMessage, helixManager);
            }
            */
            LogUtil.logWarn(logger, _eventId, errorMessage);
          }
          if (clusterStatusMonitor != null) {
            clusterStatusMonitor.setRebalanceFailureGauge(hasFailure);
            clusterStatusMonitor.setResourceRebalanceStates(failedResources,
                ResourceMonitor.RebalanceStatus.BEST_POSSIBLE_STATE_CAL_FAILED);
          }
        } catch (Exception e) {
          LogUtil.logError(logger, _eventId, "Could not update cluster status!", e);
        }
        return null;
      }
    });
  }

  // Check whether the offline/disabled instance count in the cluster reaches the set limit,
  // if yes, pause the rebalancer, and throw exception to terminate rebalance cycle.
  /**
   * Validates the offline/disabled instance count against
   * ClusterConfig.getMaxOfflineInstancesAllowed(). When the limit is exceeded the cluster is
   * auto-placed into maintenance mode (if not already in it and a HelixManager is available).
   *
   * @param cache data cache providing instance sets and cluster config
   * @param manager manager used to enable maintenance mode; may be null (only an error is logged)
   * @return true if the cluster is within the limit (or no limit is configured), false otherwise
   */
  private boolean validateOfflineInstancesLimit(final ResourceControllerDataProvider cache,
      final HelixManager manager) {
    int maxOfflineInstancesAllowed = cache.getClusterConfig().getMaxOfflineInstancesAllowed();
    // A negative limit means the check is disabled.
    if (maxOfflineInstancesAllowed >= 0) {
      int offlineCount = cache.getAllInstances().size() - cache.getEnabledLiveInstances().size();
      if (offlineCount > maxOfflineInstancesAllowed) {
        String errMsg = String.format(
            "Offline Instances count %d greater than allowed count %d. Stop rebalance and put the cluster %s into maintenance mode.",
            offlineCount, maxOfflineInstancesAllowed, cache.getClusterName());
        if (manager != null) {
          // Only trigger maintenance mode if the cluster is not already in it.
          if (manager.getHelixDataAccessor()
              .getProperty(manager.getHelixDataAccessor().keyBuilder().maintenance()) == null) {
            manager.getClusterManagmentTool()
                .autoEnableMaintenanceMode(manager.getClusterName(), true, errMsg,
                    MaintenanceSignal.AutoTriggerReason.MAX_OFFLINE_INSTANCES_EXCEEDED);
            LogUtil.logWarn(logger, _eventId, errMsg);
          }
        } else {
          LogUtil.logError(logger, _eventId, "Failed to put cluster " + cache.getClusterName()
              + " into maintenance mode, HelixManager is not set!");
        }
        return false;
      }
    }
    return true;
  }

  /**
   * Pushes the latest runtime-configurable settings (rebalance preference, async global
   * rebalance mode) from the cluster config into the WAGED rebalancer instance.
   *
   * @param wagedRebalancer the rebalancer to update; must not be null
   * @param clusterConfig current cluster config; ignored if null
   */
  private void updateWagedRebalancer(WagedRebalancer wagedRebalancer, ClusterConfig clusterConfig) {
    if (clusterConfig != null) {
      // Since the rebalance configuration can be updated at runtime, try to update the rebalancer
      // before calculating.
      wagedRebalancer.updateRebalancePreference(clusterConfig.getGlobalRebalancePreference());
      wagedRebalancer
          .setGlobalRebalanceAsyncMode(clusterConfig.isGlobalRebalanceAsyncModeEnabled());
    }
  }

  /**
   * Rebalance with the WAGED rebalancer
   * The rebalancer only calculates the new ideal assignment for all the resources that are
   * configured to use the WAGED rebalancer.
   *
   * @param wagedRebalancer The WAGED rebalancer instance.
   * @param cache Cluster data cache.
   * @param currentStateOutput The current state information.
   * @param resourceMap The complete resource map. The method will filter the map for the compatible resources.
   * @param output The best possible state output.
   * @param failureResources The failure records that will be updated if any resource cannot be computed.
   * @return The map of all the calculated resources.
   */
  private Map<String, Resource> computeResourceBestPossibleStateWithWagedRebalancer(
      WagedRebalancer wagedRebalancer, ResourceControllerDataProvider cache,
      CurrentStateOutput currentStateOutput, Map<String, Resource> resourceMap,
      BestPossibleStateOutput output, List<String> failureResources) {
    if (cache.isMaintenanceModeEnabled()) {
      // The WAGED rebalancer won't be used while maintenance mode is enabled.
      return Collections.emptyMap();
    }

    // Find the compatible resources: 1. FULL_AUTO 2. Configured to use the WAGED rebalancer
    Map<String, Resource> wagedRebalancedResourceMap =
        resourceMap.entrySet().stream().filter(resourceEntry -> {
          IdealState is = cache.getIdealState(resourceEntry.getKey());
          return is != null && is.getRebalanceMode().equals(IdealState.RebalanceMode.FULL_AUTO)
              && WagedRebalancer.class.getName().equals(is.getRebalancerClassName());
        }).collect(Collectors.toMap(resourceEntry -> resourceEntry.getKey(),
            resourceEntry -> resourceEntry.getValue()));

    Map<String, IdealState> newIdealStates = new HashMap<>();
    if (wagedRebalancer != null) {
      updateWagedRebalancer(wagedRebalancer, cache.getClusterConfig());
      try {
        newIdealStates.putAll(wagedRebalancer
            .computeNewIdealStates(cache, wagedRebalancedResourceMap, currentStateOutput));
      } catch (HelixRebalanceException ex) {
        // Note that unlike the legacy rebalancer, the WAGED rebalance won't return partial result.
        // Since it calculates for all the eligible resources globally, a partial result is invalid.
        // TODO propagate the rebalancer failure information to updateRebalanceStatus for monitoring.
        LogUtil.logError(logger, _eventId, String
            .format("Failed to calculate the new Ideal States using the rebalancer %s due to %s",
                wagedRebalancer.getClass().getSimpleName(), ex.getFailureType()), ex);
      }
    } else {
      LogUtil.logWarn(logger, _eventId,
          "Skip rebalancing using the WAGED rebalancer since it is not configured in the rebalance pipeline.");
    }

    Iterator<Resource> itr = wagedRebalancedResourceMap.values().iterator();
    while (itr.hasNext()) {
      Resource resource = itr.next();
      IdealState is = newIdealStates.get(resource.getResourceName());
      // Check if the WAGED rebalancer has calculated the result for this resource or not.
      if (is != null && checkBestPossibleStateCalculation(is)) {
        // The WAGED rebalancer calculates a valid result, record in the output
        updateBestPossibleStateOutput(output, resource, is);
      } else {
        failureResources.add(resource.getResourceName());
        LogUtil.logWarn(logger, _eventId, String
            .format("Failed to calculate best possible states for %s.",
                resource.getResourceName()));
      }
    }
    return wagedRebalancedResourceMap;
  }

  /**
   * Records a computed IdealState into the best possible state output: the preference lists
   * for the resource plus the per-partition instance-state maps.
   *
   * @param output output to write into
   * @param resource the resource whose partitions are recorded
   * @param computedIdealState the IdealState produced by the rebalancer
   */
  private void updateBestPossibleStateOutput(BestPossibleStateOutput output, Resource resource,
      IdealState computedIdealState) {
    output.setPreferenceLists(resource.getResourceName(), computedIdealState.getPreferenceLists());
    for (Partition partition : resource.getPartitions()) {
      Map<String, String> newStateMap =
          computedIdealState.getInstanceStateMap(partition.getPartitionName());
      output.setState(resource.getResourceName(), partition, newStateMap);
    }
  }

  /**
   * Computes the best possible state for a single resource via the legacy per-resource
   * rebalancer path: pick the rebalancer from the IdealState, compute the new IdealState,
   * then map it to a partition-state assignment through the MappingCalculator.
   *
   * @param event the cluster event (supplies the HelixManager for rebalancer init)
   * @param cache data cache
   * @param currentStateOutput current states
   * @param resource the resource to compute
   * @param output output that receives preference lists and per-partition states
   * @return true if the calculation succeeded and produced a usable result, false otherwise
   */
  private boolean computeSingleResourceBestPossibleState(ClusterEvent event,
      ResourceControllerDataProvider cache, CurrentStateOutput currentStateOutput,
      Resource resource, BestPossibleStateOutput output) {
    // for each ideal state
    // read the state model def
    // for each resource
    // get the preference list
    // for each instanceName check if its alive then assign a state

    String resourceName = resource.getResourceName();
    LogUtil.logDebug(logger, _eventId, "Processing resource:" + resourceName);
    // Ideal state may be gone. In that case we need to get the state model name
    // from the current state
    IdealState idealState = cache.getIdealState(resourceName);
    if (idealState == null) {
      // if ideal state is deleted, use an empty one
      LogUtil.logInfo(logger, _eventId, "resource:" + resourceName + " does not exist anymore");
      idealState = new IdealState(resourceName);
      idealState.setStateModelDefRef(resource.getStateModelDefRef());
    }

    // Skip resources are tasks for regular pipeline
    if (idealState.getStateModelDefRef().equals(TaskConstants.STATE_MODEL_NAME)) {
      LogUtil.logWarn(logger, _eventId, String
          .format("Resource %s should not be processed by %s pipeline", resourceName,
              cache.getPipelineName()));
      return false;
    }

    Rebalancer<ResourceControllerDataProvider> rebalancer =
        getRebalancer(idealState, resourceName, cache.isMaintenanceModeEnabled());
    MappingCalculator<ResourceControllerDataProvider> mappingCalculator =
        getMappingCalculator(rebalancer, resourceName);

    if (rebalancer == null || mappingCalculator == null) {
      LogUtil.logError(logger, _eventId, "Error computing assignment for resource " + resourceName
          + ". no rebalancer found. rebalancer: " + rebalancer + " mappingCalculator: "
          + mappingCalculator);
    }

    if (rebalancer != null && mappingCalculator != null) {
      ResourceAssignment partitionStateAssignment = null;
      try {
        HelixManager manager = event.getAttribute(AttributeName.helixmanager.name());
        rebalancer.init(manager);
        idealState =
            rebalancer.computeNewIdealState(resourceName, idealState, currentStateOutput, cache);
        output.setPreferenceLists(resourceName, idealState.getPreferenceLists());

        // Use the internal MappingCalculator interface to compute the final assignment
        // The next release will support rebalancers that compute the mapping from start to finish
        partitionStateAssignment = mappingCalculator
            .computeBestPossiblePartitionState(cache, idealState, resource, currentStateOutput);

        if (partitionStateAssignment == null) {
          LogUtil.logWarn(logger, _eventId,
              "PartitionStateAssignment is null, resource: " + resourceName);
          return false;
        }

        for (Partition partition : resource.getPartitions()) {
          Map<String, String> newStateMap = partitionStateAssignment.getReplicaMap(partition);
          output.setState(resourceName, partition, newStateMap);
        }

        // Check if calculation is done successfully
        return checkBestPossibleStateCalculation(idealState);
      } catch (HelixException e) {
        // No eligible instance is found.
        LogUtil.logError(logger, _eventId, e.getMessage());
      } catch (Exception e) {
        LogUtil.logError(logger, _eventId,
            "Error computing assignment for resource " + resourceName + ". Skipping." , e);
      }
    }
    // Exception or rebalancer is not found
    return false;
  }

  /**
   * Checks whether a FULL_AUTO IdealState represents a successful calculation: the
   * preference lists must exist and not all of them may be empty.
   * Non-FULL_AUTO modes are always treated as successful since their assignment is not
   * controlled by Helix.
   *
   * @param idealState the computed IdealState to validate
   * @return true if the calculation is considered successful
   */
  private boolean checkBestPossibleStateCalculation(IdealState idealState) {
    // If replicas is 0, indicate the resource is not fully initialized or ready to be rebalanced
    if (idealState.getRebalanceMode() == IdealState.RebalanceMode.FULL_AUTO && !idealState
        .getReplicas().equals("0")) {
      Map<String, List<String>> preferenceLists = idealState.getPreferenceLists();
      if (preferenceLists == null || preferenceLists.isEmpty()) {
        return false;
      }
      int emptyListCount = 0;
      for (List<String> preferenceList : preferenceLists.values()) {
        if (preferenceList.isEmpty()) {
          emptyListCount++;
        }
      }
      // If all lists are empty, rebalance fails completely
      return emptyListCount != preferenceLists.values().size();
    } else {
      // For non FULL_AUTO RebalanceMode, rebalancing is not controlled by Helix
      return true;
    }
  }

  /**
   * Reflectively instantiates a user-provided rebalancer class.
   *
   * @param rebalancerClassName fully-qualified class name from the IdealState; may be null
   * @param resourceName resource name (for logging only)
   * @return the instantiated rebalancer, or null if the class name is null or loading fails
   */
  private Rebalancer<ResourceControllerDataProvider> getCustomizedRebalancer(
      String rebalancerClassName, String resourceName) {
    Rebalancer<ResourceControllerDataProvider> customizedRebalancer = null;
    if (rebalancerClassName != null) {
      if (logger.isDebugEnabled()) {
        LogUtil.logDebug(logger, _eventId,
            "resource " + resourceName + " use idealStateRebalancer " + rebalancerClassName);
      }
      try {
        customizedRebalancer = Rebalancer.class
            .cast(HelixUtil.loadClass(getClass(), rebalancerClassName).newInstance());
      } catch (Exception e) {
        LogUtil.logError(logger, _eventId,
            "Exception while invoking custom rebalancer class:" + rebalancerClassName, e);
      }
    }
    return customizedRebalancer;
  }

  /**
   * Selects the rebalancer implementation for a resource based on its rebalance mode:
   * FULL_AUTO uses a customized rebalancer if configured (else AutoRebalancer; in
   * maintenance mode always MaintenanceRebalancer), SEMI_AUTO/CUSTOMIZED use their built-in
   * rebalancers, USER_DEFINED/TASK require a customized rebalancer class.
   *
   * @param idealState IdealState carrying the rebalance mode and optional rebalancer class
   * @param resourceName resource name (for logging only)
   * @param isMaintenanceModeEnabled whether the cluster is in maintenance mode
   * @return the selected rebalancer, or null if none could be determined
   */
  private Rebalancer<ResourceControllerDataProvider> getRebalancer(IdealState idealState,
      String resourceName, boolean isMaintenanceModeEnabled) {
    Rebalancer<ResourceControllerDataProvider> rebalancer = null;
    switch (idealState.getRebalanceMode()) {
    case FULL_AUTO:
      if (isMaintenanceModeEnabled) {
        rebalancer = new MaintenanceRebalancer();
      } else {
        Rebalancer<ResourceControllerDataProvider> customizedRebalancer =
            getCustomizedRebalancer(idealState.getRebalancerClassName(), resourceName);
        if (customizedRebalancer != null) {
          rebalancer = customizedRebalancer;
        } else {
          rebalancer = new AutoRebalancer();
        }
      }
      break;
    case SEMI_AUTO:
      rebalancer = new SemiAutoRebalancer<>();
      break;
    case CUSTOMIZED:
      rebalancer = new CustomRebalancer();
      break;
    case USER_DEFINED:
    case TASK:
      rebalancer = getCustomizedRebalancer(idealState.getRebalancerClassName(), resourceName);
      break;
    default:
      LogUtil.logError(logger, _eventId,
          "Fail to find the rebalancer, invalid rebalance mode " + idealState.getRebalanceMode());
      break;
    }
    return rebalancer;
  }

  /**
   * Obtains the MappingCalculator for the given rebalancer by casting; falls back to
   * SemiAutoRebalancer when the rebalancer does not implement MappingCalculator or is null.
   *
   * @param rebalancer the selected rebalancer; may be null
   * @param resourceName resource name (for logging only)
   * @return a non-null mapping calculator
   */
  private MappingCalculator<ResourceControllerDataProvider> getMappingCalculator(
      Rebalancer<ResourceControllerDataProvider> rebalancer, String resourceName) {
    MappingCalculator<ResourceControllerDataProvider> mappingCalculator = null;

    if (rebalancer != null) {
      try {
        mappingCalculator = MappingCalculator.class.cast(rebalancer);
      } catch (ClassCastException e) {
        LogUtil.logWarn(logger, _eventId,
            "Rebalancer does not have a mapping calculator, defaulting to SEMI_AUTO, resource: "
                + resourceName);
      }
    }
    if (mappingCalculator == null) {
      mappingCalculator = new SemiAutoRebalancer<>();
    }
    return mappingCalculator;
  }
}
| |
/**
* JBoss, Home of Professional Open Source.
* Copyright 2014-2022 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.model;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import javax.persistence.Cacheable;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.Lob;
import javax.persistence.MapKeyColumn;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.UniqueConstraint;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.annotations.Type;
import org.jboss.pnc.enums.SystemImageType;
/**
 * The BuildEnvironment, selected by the Environment Driver to run a build, based on the buildConfiguration requirements
 *
 * <p>
 * Entity identity (equals/hashCode) is based on the (systemImageId, systemImageRepositoryUrl) pair, which is also
 * enforced as a unique constraint in the database. {@code systemImageId} has no public setter and is only assigned via
 * the {@link Builder} to keep builds reproducible.
 *
 * @author avibelli
 */
@Cacheable
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
@Entity
@Table(
        uniqueConstraints = { @UniqueConstraint(
                name = "uk_buildenvironment_imageid_imagerepositoryurl",
                columnNames = { "systemImageId", "systemImageRepositoryUrl" }) },
        indexes = { @Index(name = "idx_buildenvironment_systemimageid", columnList = "systemimageid") })
public class BuildEnvironment implements GenericEntity<Integer> {

    private static final long serialVersionUID = 3170247997550146257L;

    public static final String SEQUENCE_NAME = "build_system_image_id_seq";

    @Id
    @SequenceGenerator(name = SEQUENCE_NAME, sequenceName = SEQUENCE_NAME, initialValue = 100, allocationSize = 1)
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = SEQUENCE_NAME)
    private Integer id;

    @NotNull
    @Size(max = 255)
    private String name;

    @Lob
    @Type(type = "org.hibernate.type.TextType")
    private String description;

    /**
     * The URL of the repository which contains the build system image.
     */
    @NotNull
    @Size(max = 255)
    @Column(updatable = false)
    private String systemImageRepositoryUrl;

    /**
     * A unique identifier such representing the system image, for example a Docker container ID or a checksum of a VM
     * image. This must never be modified to ensure build reproducibility.
     */
    @NotNull
    @Column(updatable = false)
    @Size(max = 255)
    private String systemImageId;

    @NotNull
    @Column(updatable = false)
    @Enumerated(EnumType.STRING)
    private SystemImageType systemImageType;

    @Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
    @ElementCollection(fetch = FetchType.EAGER)
    @CollectionTable(
            name = "build_environment_attributes",
            joinColumns = @JoinColumn(
                    name = "build_environment_id",
                    foreignKey = @ForeignKey(name = "fk_build_environment_attributes_buildenvironment")))
    @MapKeyColumn(name = "name")
    @Column(name = "value")
    private Map<String, String> attributes = new HashMap<>();

    @NotNull
    private boolean deprecated = false;

    @NotNull
    private boolean hidden = false;

    public BuildEnvironment() {
    }

    @Override
    public Integer getId() {
        return id;
    }

    @Override
    public void setId(Integer id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getSystemImageRepositoryUrl() {
        return systemImageRepositoryUrl;
    }

    public void setSystemImageRepositoryUrl(String systemImageRepositoryUrl) {
        this.systemImageRepositoryUrl = systemImageRepositoryUrl;
    }

    public String getSystemImageId() {
        return systemImageId;
    }

    public Map<String, String> getAttributes() {
        return attributes;
    }

    public void setAttributes(Map<String, String> attributes) {
        this.attributes = attributes;
    }

    public String getAttribute(String key) {
        return attributes.get(key);
    }

    public String putAttribute(String key, String value) {
        return attributes.put(key, value);
    }

    public SystemImageType getSystemImageType() {
        return systemImageType;
    }

    public void setSystemImageType(SystemImageType systemImageType) {
        this.systemImageType = systemImageType;
    }

    public boolean isDeprecated() {
        return deprecated;
    }

    public void setDeprecated(boolean deprecated) {
        this.deprecated = deprecated;
    }

    public boolean isHidden() {
        return hidden;
    }

    public void setHidden(boolean hidden) {
        this.hidden = hidden;
    }

    @Override
    public String toString() {
        return "Build Environment [name: " + name + ", image id: " + this.systemImageId + "]";
    }

    // Needed for mapstruct to be able to use builders for immutable types (systemImageId)
    public static BuildEnvironment.Builder builder() {
        return Builder.newBuilder();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (!(o instanceof BuildEnvironment))
            return false;
        BuildEnvironment that = (BuildEnvironment) o;
        // Null-safe comparison: a freshly constructed entity may not have these fields populated
        // yet, and hashCode() already tolerates nulls via Objects.hash - keep the two consistent.
        return Objects.equals(systemImageId, that.getSystemImageId())
                && Objects.equals(systemImageRepositoryUrl, that.getSystemImageRepositoryUrl());
    }

    @Override
    public int hashCode() {
        return Objects.hash(systemImageId, systemImageRepositoryUrl);
    }

    /**
     * Fluent builder for {@link BuildEnvironment}; the only supported way to set the otherwise immutable
     * {@code systemImageId}.
     */
    public static class Builder {

        private Integer id;

        private String name;

        private String description;

        private String systemImageRepositoryUrl;

        private String systemImageId;

        private Map<String, String> attributes = new HashMap<>();

        private SystemImageType systemImageType;

        // Primitives instead of boxed Booleans: these always hold a concrete default and the
        // setters take primitives, so boxing added nothing but NPE risk.
        private boolean deprecated = false;

        private boolean hidden = false;

        private Builder() {
        }

        public static Builder newBuilder() {
            return new Builder();
        }

        public BuildEnvironment build() {
            BuildEnvironment buildEnvironment = new BuildEnvironment();
            buildEnvironment.setId(id);
            buildEnvironment.setName(name);
            buildEnvironment.setDescription(description);
            buildEnvironment.setSystemImageRepositoryUrl(systemImageRepositoryUrl);
            // Direct field assignment: systemImageId intentionally has no setter.
            buildEnvironment.systemImageId = systemImageId;
            buildEnvironment.setAttributes(attributes);
            buildEnvironment.setSystemImageType(systemImageType);
            buildEnvironment.deprecated = deprecated;
            buildEnvironment.hidden = hidden;
            return buildEnvironment;
        }

        public Builder id(Integer id) {
            this.id = id;
            return this;
        }

        public Builder name(String name) {
            this.name = name;
            return this;
        }

        public Builder description(String description) {
            this.description = description;
            return this;
        }

        public Builder systemImageRepositoryUrl(String systemImageRepositoryUrl) {
            this.systemImageRepositoryUrl = systemImageRepositoryUrl;
            return this;
        }

        public Builder systemImageId(String systemImageId) {
            this.systemImageId = systemImageId;
            return this;
        }

        public Builder attributes(Map<String, String> attributes) {
            this.attributes = attributes;
            return this;
        }

        public Builder attribute(String key, String value) {
            this.attributes.put(key, value);
            return this;
        }

        public Builder systemImageType(SystemImageType systemImageType) {
            this.systemImageType = systemImageType;
            return this;
        }

        public Builder deprecated(boolean isDeprecated) {
            this.deprecated = isDeprecated;
            return this;
        }

        public Builder hidden(boolean isHidden) {
            this.hidden = isHidden;
            return this;
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.