gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Copyright 2013 Eediom Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.araqne.logdb.metadata; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.felix.ipojo.annotations.Component; import org.apache.felix.ipojo.annotations.Invalidate; import org.apache.felix.ipojo.annotations.Requires; import org.apache.felix.ipojo.annotations.Validate; import org.araqne.log.api.FieldDefinition; import org.araqne.logdb.AccountService; import org.araqne.logdb.FieldOrdering; import org.araqne.logdb.FunctionRegistry; import org.araqne.logdb.MetadataCallback; import org.araqne.logdb.MetadataProvider; import org.araqne.logdb.MetadataService; import org.araqne.logdb.Permission; import org.araqne.logdb.Privilege; import org.araqne.logdb.QueryContext; import org.araqne.logdb.Row; import org.araqne.logdb.SecurityGroup; import org.araqne.logdb.query.parser.CommandOptions; import org.araqne.logdb.query.parser.ParseResult; import org.araqne.logdb.query.parser.QueryTokenizer; import org.araqne.logstorage.LockKey; import org.araqne.logstorage.LockStatus; import org.araqne.logstorage.LogFileService; import org.araqne.logstorage.LogFileServiceRegistry; import org.araqne.logstorage.LogRetentionPolicy; import org.araqne.logstorage.LogStorage; import org.araqne.logstorage.LogTableRegistry; import org.araqne.logstorage.StorageConfig; import org.araqne.logstorage.TableConfig; import 
org.araqne.logstorage.TableSchema; import org.araqne.storage.api.FilePath; @Component(name = "logdb-table-metadata") public class TableMetadataProvider implements MetadataProvider, FieldOrdering { @Requires private LogTableRegistry tableRegistry; @Requires private AccountService accountService; @Requires private LogFileServiceRegistry lfsRegistry; @Requires private LogStorage storage; @Requires private MetadataService metadataService; @Requires private FunctionRegistry functionRegistry; @Validate public void start() { metadataService.addProvider(this); } @Invalidate public void stop() { if (metadataService != null) metadataService.removeProvider(this); } @Override public String getType() { return "tables"; } @Override public void verify(QueryContext context, String queryString) { QueryTokenizer.parseOptions(context, queryString, 0, Arrays.asList("verbose"), functionRegistry); } @SuppressWarnings("unchecked") @Override public void query(QueryContext context, String queryString, MetadataCallback callback) { ParseResult r = QueryTokenizer.parseOptions(context, queryString, 0, Arrays.asList("verbose"), functionRegistry); Map<String, String> options = (Map<String, String>) r.value; boolean verbose = CommandOptions.parseBoolean(options.get("verbose")); int next = r.next; queryString = queryString.substring(next).trim(); List<String> targetTables = MetadataQueryStringParser.getFilteredTableNames(context.getSession(), tableRegistry, accountService, queryString); for (String tableName : tableRegistry.getTableNames()) { if (targetTables.contains(tableName)) if (accountService.checkPermission(context.getSession(), tableName, Permission.READ)) writeTableInfo(context, tableName, verbose, callback); } } private void writeTableInfo(QueryContext context, String tableName, boolean verbose, MetadataCallback callback) { Map<String, Object> m = new HashMap<String, Object>(); m.put("table", tableName); TableSchema s = tableRegistry.getTableSchema(tableName); StorageConfig 
primaryStorage = s.getPrimaryStorage(); LogFileService lfs = lfsRegistry.getLogFileService(primaryStorage.getType()); // primary storage m.put("primary_configs", marshal(lfs, s.getPrimaryStorage())); // replica storage m.put("replica_configs", marshal(lfs, s.getReplicaStorage())); // field definitions List<FieldDefinition> fields = s.getFieldDefinitions(); if (fields != null) { for (FieldDefinition field : fields) { String line = null; if (field.getLength() > 0) line = field.getName() + "\t" + field.getType() + "(" + field.getLength() + ")"; line = field.getName() + "\t" + field.getType(); m.put("fields", line); } } m.put("metadata", s.getMetadata()); // retention pollicy LogRetentionPolicy retentionPolicy = storage.getRetentionPolicy(tableName); String retention = null; if (retentionPolicy != null && retentionPolicy.getRetentionDays() > 0) retention = retentionPolicy.getRetentionDays() + "days"; m.put("retention_policy", retention); m.put("data_path", storage.getTableDirectory(tableName).getAbsolutePath()); LockStatus status = storage.lockStatus(new LockKey("script", tableName, null)); m.put("is_locked", status.isLocked()); if (status.isLocked()) { m.put("lock_owner", status.getOwner()); m.put("lock_purpose", status.getPurposes().toArray(new String[0])); m.put("lock_reentcnt", status.getReentrantCount()); } else { m.put("lock_owner", null); m.put("lock_purpose", null); m.put("lock_reentcnt", null); } List<Object> privileges = new ArrayList<Object>(); for (Privilege p : accountService.getPrivileges(context.getSession(), null)) { if (!p.getTableName().equals(tableName)) continue; Map<String, Object> privilege = new HashMap<String, Object>(); privilege.put("login_name", p.getLoginName()); List<String> permissions = new ArrayList<String>(); for (Permission permission : p.getPermissions()) permissions.add(permission.toString()); privilege.put("permissions", permissions); privileges.add(privilege); } m.put("privileges", privileges); List<Map<String, Object>> groups = 
new ArrayList<Map<String, Object>>(); for (SecurityGroup sg : accountService.getSecurityGroups()) { if (sg.getReadableTables().contains(tableName)) { Map<String, Object> group = new HashMap<String, Object>(); group.put("guid", sg.getGuid()); group.put("name", sg.getName()); group.put("description", sg.getDescription()); group.put("created", sg.getCreated()); group.put("updated", sg.getUpdated()); groups.add(group); } } m.put("security_groups", groups); if (verbose) setDetail(tableName, m); callback.onPush(new Row(m)); } private void setDetail(String tableName, Map<String, Object> m) { List<Date> logDates = new ArrayList<Date>(storage.getLogDates(tableName)); if (logDates.size() > 0) { m.put("min_day", logDates.get(logDates.size() - 1)); m.put("max_day", logDates.get(0)); } FilePath dir = storage.getTableDirectory(tableName); m.put("disk_usage", getConsumption(dir)); } private long getConsumption(FilePath dir) { long total = 0; FilePath[] files = dir.listFiles(); if (files == null) return 0; for (FilePath f : files) total += f.length(); return total; } private Map<String, Object> marshal(LogFileService lfs, StorageConfig storageConfig) { if (storageConfig == null) return null; Map<String, Object> m = new HashMap<String, Object>(); m.put("type", storageConfig.getType()); m.put("base_path", storageConfig.getBasePath()); Map<String, String> configs = new HashMap<String, String>(); for (TableConfig c : storageConfig.getConfigs()) { String value = null; if (c != null && c.getValues().size() > 1) value = c.getValues().toString(); else if (c != null) value = c.getValue(); configs.put(c.getKey(), value); } m.put("configs", configs); return m; } @Override public List<String> getFieldOrder() { return Arrays.asList("table", "compression", "crypto", "metadata", "replication_mode", "replication_table", "lock_owner", "lock_purpose", "lock_reentcnt", "retention_policy", "data_path"); } }
/* * Copyright 2012 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.abcdroid.devfest12.ui; import net.abcdroid.devfest12.util.UIUtils; import com.google.analytics.tracking.android.EasyTracker; import net.abcdroid.devfest12.BuildConfig; import net.abcdroid.devfest12.R; import com.actionbarsherlock.app.SherlockFragment; import com.actionbarsherlock.view.Menu; import com.actionbarsherlock.view.MenuInflater; import com.actionbarsherlock.view.MenuItem; import android.app.Activity; import android.content.Intent; import android.graphics.Bitmap; import android.os.Bundle; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.webkit.ConsoleMessage; import android.webkit.WebChromeClient; import android.webkit.WebView; import android.webkit.WebViewClient; import android.widget.Toast; import static net.abcdroid.devfest12.util.LogUtils.LOGD; import static net.abcdroid.devfest12.util.LogUtils.LOGE; import static net.abcdroid.devfest12.util.LogUtils.makeLogTag; /** * Shows a {@link WebView} with a map of the conference venue. */ public class MapFragment extends SherlockFragment { private static final String TAG = makeLogTag(MapFragment.class); /** * When specified, will automatically point the map to the requested room. 
*/ public static final String EXTRA_ROOM = "com.google.android.iosched.extra.ROOM"; private static final String SYSTEM_FEATURE_MULTITOUCH = "android.hardware.touchscreen.multitouch"; private static final String MAP_JSI_NAME = "MAP_CONTAINER"; private static final String MAP_URL = "https://maps.google.com/maps?q=USMP,+FACULTAD+DE+MEDICINA+HUMANA&hl=es&sll=-0.878872,-68.291016&sspn=68.681205,79.013672&hq=USMP,+FACULTAD+DE+MEDICINA+HUMANA&t=m&z=17"; private static boolean CLEAR_CACHE_ON_LOAD = BuildConfig.DEBUG; private WebView mWebView; private View mLoadingSpinner; private boolean mMapInitialized = false; public interface Callbacks { public void onRoomSelected(String roomId); } private static Callbacks sDummyCallbacks = new Callbacks() { @Override public void onRoomSelected(String roomId) { } }; private Callbacks mCallbacks = sDummyCallbacks; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setHasOptionsMenu(true); EasyTracker.getTracker().trackView("Map"); LOGD("Tracker", "Map"); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { ViewGroup root = (ViewGroup) inflater.inflate(R.layout.fragment_webview_with_spinner, container, false); mLoadingSpinner = root.findViewById(R.id.loading_spinner); mWebView = (WebView) root.findViewById(R.id.webview); mWebView.setWebChromeClient(mWebChromeClient); mWebView.setWebViewClient(mWebViewClient); return root; } @Override public void onViewCreated(View view, Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); // Initialize web view if (CLEAR_CACHE_ON_LOAD) { mWebView.clearCache(true); } boolean hideZoomControls = getActivity().getPackageManager().hasSystemFeature(SYSTEM_FEATURE_MULTITOUCH) && UIUtils.hasHoneycomb(); mWebView.getSettings().setJavaScriptEnabled(true); mWebView.getSettings().setJavaScriptCanOpenWindowsAutomatically(false); mWebView.loadUrl(MAP_URL + "?multitouch=" + (hideZoomControls ? 
1 : 0)); mWebView.addJavascriptInterface(mMapJsiImpl, MAP_JSI_NAME); } @Override public void onAttach(Activity activity) { super.onAttach(activity); if (!(activity instanceof Callbacks)) { throw new ClassCastException("Activity must implement fragment's callbacks."); } mCallbacks = (Callbacks) activity; } @Override public void onDetach() { super.onDetach(); mCallbacks = sDummyCallbacks; } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { super.onCreateOptionsMenu(menu, inflater); inflater.inflate(R.menu.map, menu); } @Override public boolean onOptionsItemSelected(MenuItem item) { if (item.getItemId() == R.id.menu_refresh) { mWebView.reload(); return true; } return super.onOptionsItemSelected(item); } private void runJs(final String js) { Activity activity = getActivity(); if (activity == null) { return; } activity.runOnUiThread(new Runnable() { @Override public void run() { LOGD(TAG, "Loading javascript:" + js); mWebView.loadUrl("javascript:" + js); } }); } /** * Helper method to escape JavaScript strings. Useful when passing strings to a WebView via * "javascript:" calls. */ private static String escapeJsString(String s) { if (s == null) { return ""; } return s.replace("'", "\\'").replace("\"", "\\\""); } public void panBy(float xFraction, float yFraction) { runJs("IoMap.panBy(" + xFraction + "," + yFraction + ");"); } /** * I/O Conference Map JavaScript interface. 
*/ private interface MapJsi { public void openContentInfo(final String roomId); public void onMapReady(); } private final WebChromeClient mWebChromeClient = new WebChromeClient() { @Override public boolean onConsoleMessage(ConsoleMessage consoleMessage) { LOGD(TAG, "JS Console message: (" + consoleMessage.sourceId() + ": " + consoleMessage.lineNumber() + ") " + consoleMessage.message()); return false; } }; private final WebViewClient mWebViewClient = new WebViewClient() { @Override public void onPageStarted(WebView view, String url, Bitmap favicon) { super.onPageStarted(view, url, favicon); mLoadingSpinner.setVisibility(View.VISIBLE); mWebView.setVisibility(View.INVISIBLE); } @Override public void onPageFinished(WebView view, String url) { super.onPageFinished(view, url); mLoadingSpinner.setVisibility(View.GONE); mWebView.setVisibility(View.VISIBLE); } @Override public void onReceivedError(WebView view, int errorCode, String description, String failingUrl) { LOGE(TAG, "Error " + errorCode + ": " + description); Toast.makeText(view.getContext(), "Error " + errorCode + ": " + description, Toast.LENGTH_LONG).show(); super.onReceivedError(view, errorCode, description, failingUrl); } }; private final MapJsi mMapJsiImpl = new MapJsi() { public void openContentInfo(final String roomId) { Activity activity = getActivity(); if (activity == null) { return; } activity.runOnUiThread(new Runnable() { @Override public void run() { mCallbacks.onRoomSelected(roomId); } }); } public void onMapReady() { LOGD(TAG, "onMapReady"); final Intent intent = BaseActivity.fragmentArgumentsToIntent(getArguments()); String showRoomId = null; if (!mMapInitialized && intent.hasExtra(EXTRA_ROOM)) { showRoomId = intent.getStringExtra(EXTRA_ROOM); } if (showRoomId != null) { runJs("IoMap.showLocationById('" + escapeJsString(showRoomId) + "');"); } mMapInitialized = true; } }; }
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.ml.dataframe;

import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsSource;

import java.util.HashMap;
import java.util.Map;

import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;

/**
 * Tests for {@code MappingsMerger.mergeMappings}: merging the "properties" and
 * "runtime" sections of multiple indices' mappings into a single mapping with
 * {@code dynamic: false}, rejecting per-field conflicts within a section, and
 * honoring source filtering excludes.
 */
public class MappingsMergerTests extends ESTestCase {

    // Two indices with identical "properties" merge into one copy of them.
    public void testMergeMappings_GivenIndicesWithIdenticalProperties() {
        Map<String, Object> index1Mappings = Map.of("properties",
                Map.of("field_1", "field_1_mappings", "field_2", "field_2_mappings"));
        MappingMetadata index1MappingMetadata = new MappingMetadata("_doc", index1Mappings);

        Map<String, Object> index2Mappings = Map.of("properties",
                Map.of("field_1", "field_1_mappings", "field_2", "field_2_mappings"));
        MappingMetadata index2MappingMetadata = new MappingMetadata("_doc", index2Mappings);

        ImmutableOpenMap.Builder<String, MappingMetadata> mappings = ImmutableOpenMap.builder();
        mappings.put("index_1", index1MappingMetadata);
        mappings.put("index_2", index2MappingMetadata);

        GetMappingsResponse getMappingsResponse = new GetMappingsResponse(mappings.build());

        MappingMetadata mergedMappings = MappingsMerger.mergeMappings(newSource(), getMappingsResponse);

        // Merged result always pins dynamic=false alongside the merged section.
        Map<String, Object> expectedMappings = new HashMap<>();
        expectedMappings.put("dynamic", false);
        expectedMappings.put("properties", index1Mappings.get("properties"));

        assertThat(mergedMappings.getSourceAsMap(), equalTo(expectedMappings));
    }

    // Same property mapped differently across indices -> 400 with both sides named.
    public void testMergeMappings_GivenPropertyFieldWithDifferentMapping() {
        Map<String, Object> index1Mappings = Map.of("properties", Map.of("field_1", "field_1_mappings"));
        MappingMetadata index1MappingMetadata = new MappingMetadata("_doc", index1Mappings);

        Map<String, Object> index2Mappings = Map.of("properties", Map.of("field_1", "different_field_1_mappings"));
        MappingMetadata index2MappingMetadata = new MappingMetadata("_doc", index2Mappings);

        ImmutableOpenMap.Builder<String, MappingMetadata> mappings = ImmutableOpenMap.builder();
        mappings.put("index_1", index1MappingMetadata);
        mappings.put("index_2", index2MappingMetadata);

        GetMappingsResponse getMappingsResponse = new GetMappingsResponse(mappings.build());

        ElasticsearchStatusException e = expectThrows(
                ElasticsearchStatusException.class,
                () -> MappingsMerger.mergeMappings(newSource(), getMappingsResponse)
        );

        assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
        assertThat(e.getMessage(), containsString("cannot merge [properties] mappings because of differences for field [field_1]; "));
        assertThat(e.getMessage(), containsString("mapped as [different_field_1_mappings] in index [index_2]"));
        assertThat(e.getMessage(), containsString("mapped as [field_1_mappings] in index [index_1]"));
    }

    // Disjoint or identical properties across indices -> union of all of them.
    public void testMergeMappings_GivenIndicesWithDifferentPropertiesButNoConflicts() {
        Map<String, Object> index1Mappings = Map.of("properties",
                Map.of("field_1", "field_1_mappings", "field_2", "field_2_mappings"));
        MappingMetadata index1MappingMetadata = new MappingMetadata("_doc", index1Mappings);

        Map<String, Object> index2Mappings = Map.of("properties",
                Map.of("field_1", "field_1_mappings", "field_3", "field_3_mappings"));
        MappingMetadata index2MappingMetadata = new MappingMetadata("_doc", index2Mappings);

        ImmutableOpenMap.Builder<String, MappingMetadata> mappings = ImmutableOpenMap.builder();
        mappings.put("index_1", index1MappingMetadata);
        mappings.put("index_2", index2MappingMetadata);

        GetMappingsResponse getMappingsResponse = new GetMappingsResponse(mappings.build());

        MappingMetadata mergedMappings = MappingsMerger.mergeMappings(newSource(), getMappingsResponse);

        Map<String, Object> mappingsAsMap = mergedMappings.getSourceAsMap();
        assertThat(mappingsAsMap.keySet(), containsInAnyOrder("dynamic", "properties"));
        assertThat(mappingsAsMap.get("dynamic"), equalTo(false));

        @SuppressWarnings("unchecked")
        Map<String, Object> fieldMappings = (Map<String, Object>) mappingsAsMap.get("properties");

        assertThat(fieldMappings.keySet(), containsInAnyOrder("field_1", "field_2", "field_3"));
        assertThat(fieldMappings.get("field_1"), equalTo("field_1_mappings"));
        assertThat(fieldMappings.get("field_2"), equalTo("field_2_mappings"));
        assertThat(fieldMappings.get("field_3"), equalTo("field_3_mappings"));
    }

    // Same scenarios as above but for the "runtime" section.
    public void testMergeMappings_GivenIndicesWithIdenticalRuntimeFields() {
        Map<String, Object> index1Mappings = Map.of("runtime",
                Map.of("field_1", "field_1_mappings", "field_2", "field_2_mappings"));
        MappingMetadata index1MappingMetadata = new MappingMetadata("_doc", index1Mappings);

        Map<String, Object> index2Mappings = Map.of("runtime",
                Map.of("field_1", "field_1_mappings", "field_2", "field_2_mappings"));
        MappingMetadata index2MappingMetadata = new MappingMetadata("_doc", index2Mappings);

        ImmutableOpenMap.Builder<String, MappingMetadata> mappings = ImmutableOpenMap.builder();
        mappings.put("index_1", index1MappingMetadata);
        mappings.put("index_2", index2MappingMetadata);

        GetMappingsResponse getMappingsResponse = new GetMappingsResponse(mappings.build());

        MappingMetadata mergedMappings = MappingsMerger.mergeMappings(newSource(), getMappingsResponse);

        Map<String, Object> expectedMappings = new HashMap<>();
        expectedMappings.put("dynamic", false);
        expectedMappings.put("runtime", index1Mappings.get("runtime"));

        assertThat(mergedMappings.getSourceAsMap(), equalTo(expectedMappings));
    }

    // Conflicting runtime field across indices -> 400 with both sides named.
    public void testMergeMappings_GivenRuntimeFieldWithDifferentMapping() {
        Map<String, Object> index1Mappings = Map.of("runtime", Map.of("field_1", "field_1_mappings"));
        MappingMetadata index1MappingMetadata = new MappingMetadata("_doc", index1Mappings);

        Map<String, Object> index2Mappings = Map.of("runtime", Map.of("field_1", "different_field_1_mappings"));
        MappingMetadata index2MappingMetadata = new MappingMetadata("_doc", index2Mappings);

        ImmutableOpenMap.Builder<String, MappingMetadata> mappings = ImmutableOpenMap.builder();
        mappings.put("index_1", index1MappingMetadata);
        mappings.put("index_2", index2MappingMetadata);

        GetMappingsResponse getMappingsResponse = new GetMappingsResponse(mappings.build());

        ElasticsearchStatusException e = expectThrows(
                ElasticsearchStatusException.class,
                () -> MappingsMerger.mergeMappings(newSource(), getMappingsResponse)
        );

        assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
        assertThat(e.getMessage(), containsString("cannot merge [runtime] mappings because of differences for field [field_1]; "));
        assertThat(e.getMessage(), containsString("mapped as [different_field_1_mappings] in index [index_2]"));
        assertThat(e.getMessage(), containsString("mapped as [field_1_mappings] in index [index_1]"));
    }

    public void testMergeMappings_GivenIndicesWithDifferentRuntimeFieldsButNoConflicts() {
        Map<String, Object> index1Mappings = Map.of("runtime",
                Map.of("field_1", "field_1_mappings", "field_2", "field_2_mappings"));
        MappingMetadata index1MappingMetadata = new MappingMetadata("_doc", index1Mappings);

        Map<String, Object> index2Mappings = Map.of("runtime",
                Map.of("field_1", "field_1_mappings", "field_3", "field_3_mappings"));
        MappingMetadata index2MappingMetadata = new MappingMetadata("_doc", index2Mappings);

        ImmutableOpenMap.Builder<String, MappingMetadata> mappings = ImmutableOpenMap.builder();
        mappings.put("index_1", index1MappingMetadata);
        mappings.put("index_2", index2MappingMetadata);

        GetMappingsResponse getMappingsResponse = new GetMappingsResponse(mappings.build());

        MappingMetadata mergedMappings = MappingsMerger.mergeMappings(newSource(), getMappingsResponse);

        Map<String, Object> mappingsAsMap = mergedMappings.getSourceAsMap();
        assertThat(mappingsAsMap.keySet(), containsInAnyOrder("dynamic", "runtime"));
        assertThat(mappingsAsMap.get("dynamic"), is(false));

        @SuppressWarnings("unchecked")
        Map<String, Object> fieldMappings = (Map<String, Object>) mappingsAsMap.get("runtime");

        assertThat(fieldMappings.keySet(), containsInAnyOrder("field_1", "field_2", "field_3"));
        assertThat(fieldMappings.get("field_1"), equalTo("field_1_mappings"));
        assertThat(fieldMappings.get("field_2"), equalTo("field_2_mappings"));
        assertThat(fieldMappings.get("field_3"), equalTo("field_3_mappings"));
    }

    // "properties" and "runtime" sections are merged independently; the same
    // field name may appear in both with different mappings without conflict.
    public void testMergeMappings_GivenPropertyAndRuntimeFields() {
        Map<String, Object> index1Mappings = new HashMap<>();
        {
            Map<String, Object> index1Properties = new HashMap<>();
            index1Properties.put("p_1", "p_1_mappings");
            Map<String, Object> index1Runtime = new HashMap<>();
            index1Runtime.put("r_1", "r_1_mappings");
            index1Mappings.put("properties", index1Properties);
            index1Mappings.put("runtime", index1Runtime);
        }
        MappingMetadata index1MappingMetadata = new MappingMetadata("_doc", index1Mappings);

        Map<String, Object> index2Mappings = new HashMap<>();
        {
            Map<String, Object> index2Properties = new HashMap<>();
            index2Properties.put("p_2", "p_2_mappings");
            Map<String, Object> index2Runtime = new HashMap<>();
            index2Runtime.put("r_2", "r_2_mappings");
            index2Runtime.put("p_1", "p_1_different_mappings"); // It is ok to have conflicting runtime/property mappings
            index2Mappings.put("properties", index2Properties);
            index2Mappings.put("runtime", index2Runtime);
        }
        MappingMetadata index2MappingMetadata = new MappingMetadata("_doc", index2Mappings);

        ImmutableOpenMap.Builder<String, MappingMetadata> mappings = ImmutableOpenMap.builder();
        mappings.put("index_1", index1MappingMetadata);
        mappings.put("index_2", index2MappingMetadata);

        GetMappingsResponse getMappingsResponse = new GetMappingsResponse(mappings.build());

        MappingMetadata mergedMappings = MappingsMerger.mergeMappings(newSource(), getMappingsResponse);

        Map<String, Object> mappingsAsMap = mergedMappings.getSourceAsMap();
        assertThat(mappingsAsMap.keySet(), containsInAnyOrder("dynamic", "properties", "runtime"));
        assertThat(mappingsAsMap.get("dynamic"), is(false));

        @SuppressWarnings("unchecked")
        Map<String, Object> mergedProperties = (Map<String, Object>) mappingsAsMap.get("properties");
        assertThat(mergedProperties.keySet(), containsInAnyOrder("p_1", "p_2"));
        assertThat(mergedProperties.get("p_1"), equalTo("p_1_mappings"));
        assertThat(mergedProperties.get("p_2"), equalTo("p_2_mappings"));

        @SuppressWarnings("unchecked")
        Map<String, Object> mergedRuntime = (Map<String, Object>) mappingsAsMap.get("runtime");
        assertThat(mergedRuntime.keySet(), containsInAnyOrder("r_1", "r_2", "p_1"));
        assertThat(mergedRuntime.get("r_1"), equalTo("r_1_mappings"));
        assertThat(mergedRuntime.get("r_2"), equalTo("r_2_mappings"));
        assertThat(mergedRuntime.get("p_1"), equalTo("p_1_different_mappings"));
    }

    // Source-filter excludes remove matching fields from both sections.
    public void testMergeMappings_GivenSourceFiltering() {
        Map<String, Object> properties = Map.of("field_1", "field_1_mappings", "field_2", "field_2_mappings");
        Map<String, Object> runtime = Map.of("runtime_field_1", "runtime_field_1_mappings",
                "runtime_field_2", "runtime_field_2_mappings");
        Map<String, Object> indexMappings = new HashMap<>();
        indexMappings.put("properties", properties);
        indexMappings.put("runtime", runtime);
        MappingMetadata indexMappingMetadata = new MappingMetadata("_doc", indexMappings);

        ImmutableOpenMap.Builder<String, MappingMetadata> mappings = ImmutableOpenMap.builder();
        mappings.put("index", indexMappingMetadata);

        GetMappingsResponse getMappingsResponse = new GetMappingsResponse(mappings.build());

        MappingMetadata mergedMappings = MappingsMerger.mergeMappings(
                newSourceWithExcludes("field_1", "runtime_field_2"),
                getMappingsResponse
        );

        Map<String, Object> mappingsAsMap = mergedMappings.getSourceAsMap();

        @SuppressWarnings("unchecked")
        Map<String, Object> propertyMappings = (Map<String, Object>) mappingsAsMap.get("properties");
        assertThat(propertyMappings.keySet(), containsInAnyOrder("field_2"));

        @SuppressWarnings("unchecked")
        Map<String, Object> runtimeMappings = (Map<String, Object>) mappingsAsMap.get("runtime");
        assertThat(runtimeMappings.keySet(), containsInAnyOrder("runtime_field_1"));
    }

    // Helper: analytics source over the single index "index" with no filtering.
    private static DataFrameAnalyticsSource newSource() {
        return new DataFrameAnalyticsSource(new String[] { "index" }, null, null, null);
    }

    // Helper: same source but with _source excludes applied.
    private static DataFrameAnalyticsSource newSourceWithExcludes(String... excludes) {
        return new DataFrameAnalyticsSource(new String[] { "index" }, null,
                new FetchSourceContext(true, null, excludes), null);
    }
}
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.cluster.metadata;

import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.action.TimestampParsingException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;

import java.io.IOException;
import java.util.Arrays;
import java.util.Map;

import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;

/**
 * Cluster-state metadata for a single mapping type: the type name, the
 * compressed mapping source, and the parsed settings of the special
 * {@code _id}, {@code _routing} and {@code _timestamp} fields. Also knows how
 * to extract id/routing/timestamp values out of a document via
 * {@link #parse(XContentParser, ParseContext)} when those values were not
 * supplied explicitly.
 */
public class MappingMetaData {

    /**
     * Parsed {@code _id} settings: an optional dot-separated path pointing at
     * the field inside the document that holds the id value.
     */
    public static class Id {

        // Sentinel for "no _id configuration"; compared by identity in
        // updateDefaultMapping(...).
        public static final Id EMPTY = new Id(null);

        private final String path;
        // Path split on "."; empty array when no path is configured.
        private final String[] pathElements;

        public Id(String path) {
            this.path = path;
            if (path == null) {
                pathElements = Strings.EMPTY_ARRAY;
            } else {
                pathElements = Strings.delimitedListToStringArray(path, ".");
            }
        }

        /** Whether a path into the document was configured. */
        public boolean hasPath() {
            return path != null;
        }

        public String path() {
            return this.path;
        }

        public String[] pathElements() {
            return this.pathElements;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;

            Id id = (Id) o;

            if (path != null ? !path.equals(id.path) : id.path != null) return false;
            if (!Arrays.equals(pathElements, id.pathElements)) return false;

            return true;
        }

        @Override
        public int hashCode() {
            int result = path != null ? path.hashCode() : 0;
            result = 31 * result + (pathElements != null ? Arrays.hashCode(pathElements) : 0);
            return result;
        }
    }

    /**
     * Parsed {@code _routing} settings: whether routing is required on index
     * operations, plus an optional path to the routing value in the document.
     */
    public static class Routing {

        // Sentinel for "no _routing configuration"; compared by identity in
        // updateDefaultMapping(...).
        public static final Routing EMPTY = new Routing(false, null);

        private final boolean required;
        private final String path;
        // Path split on "."; empty array when no path is configured.
        private final String[] pathElements;

        public Routing(boolean required, String path) {
            this.required = required;
            this.path = path;
            if (path == null) {
                pathElements = Strings.EMPTY_ARRAY;
            } else {
                pathElements = Strings.delimitedListToStringArray(path, ".");
            }
        }

        /** Whether a routing value must be provided for index operations. */
        public boolean required() {
            return required;
        }

        /** Whether a path into the document was configured. */
        public boolean hasPath() {
            return path != null;
        }

        public String path() {
            return this.path;
        }

        public String[] pathElements() {
            return this.pathElements;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;

            Routing routing = (Routing) o;

            if (required != routing.required) return false;
            if (path != null ? !path.equals(routing.path) : routing.path != null) return false;
            if (!Arrays.equals(pathElements, routing.pathElements)) return false;

            return true;
        }

        @Override
        public int hashCode() {
            int result = (required ? 1 : 0);
            result = 31 * result + (path != null ? path.hashCode() : 0);
            result = 31 * result + (pathElements != null ? Arrays.hashCode(pathElements) : 0);
            return result;
        }
    }

    /**
     * Parsed {@code _timestamp} settings: enabled flag, optional path to the
     * timestamp value in the document, and the date format used to parse it.
     */
    public static class Timestamp {

        /**
         * Normalizes a timestamp string to epoch milliseconds (as a string).
         * A purely numeric input is returned as-is; otherwise the value is
         * parsed with {@code dateTimeFormatter}.
         *
         * @throws TimestampParsingException if the value is neither numeric
         *         nor parseable by the formatter
         */
        public static String parseStringTimestamp(String timestampAsString, FormatDateTimeFormatter dateTimeFormatter) throws TimestampParsingException {
            long ts;
            try {
                // if we manage to parse it, its a millisecond timestamp, just return the string as is
                ts = Long.parseLong(timestampAsString);
                return timestampAsString;
            } catch (NumberFormatException e) {
                try {
                    ts = dateTimeFormatter.parser().parseMillis(timestampAsString);
                } catch (RuntimeException e1) {
                    throw new TimestampParsingException(timestampAsString);
                }
            }
            return Long.toString(ts);
        }

        // Sentinel for "no _timestamp configuration"; compared by identity in
        // updateDefaultMapping(...).
        public static final Timestamp EMPTY = new Timestamp(false, null, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT);

        private final boolean enabled;
        private final String path;
        private final String format;
        // Path split on "."; empty array when no path is configured.
        private final String[] pathElements;
        // Formatter derived from `format` via Joda.forPattern.
        private final FormatDateTimeFormatter dateTimeFormatter;

        public Timestamp(boolean enabled, String path, String format) {
            this.enabled = enabled;
            this.path = path;
            if (path == null) {
                pathElements = Strings.EMPTY_ARRAY;
            } else {
                pathElements = Strings.delimitedListToStringArray(path, ".");
            }
            this.format = format;
            this.dateTimeFormatter = Joda.forPattern(format);
        }

        public boolean enabled() {
            return enabled;
        }

        /** Whether a path into the document was configured. */
        public boolean hasPath() {
            return path != null;
        }

        public String path() {
            return this.path;
        }

        public String[] pathElements() {
            return this.pathElements;
        }

        public String format() {
            return this.format;
        }

        public FormatDateTimeFormatter dateTimeFormatter() {
            return this.dateTimeFormatter;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;

            Timestamp timestamp = (Timestamp) o;

            if (enabled != timestamp.enabled) return false;
            if (dateTimeFormatter != null ? !dateTimeFormatter.equals(timestamp.dateTimeFormatter) : timestamp.dateTimeFormatter != null)
                return false;
            if (format != null ? !format.equals(timestamp.format) : timestamp.format != null) return false;
            if (path != null ? !path.equals(timestamp.path) : timestamp.path != null) return false;
            if (!Arrays.equals(pathElements, timestamp.pathElements)) return false;

            return true;
        }

        @Override
        public int hashCode() {
            int result = (enabled ? 1 : 0);
            result = 31 * result + (path != null ? path.hashCode() : 0);
            result = 31 * result + (format != null ? format.hashCode() : 0);
            result = 31 * result + (pathElements != null ? Arrays.hashCode(pathElements) : 0);
            result = 31 * result + (dateTimeFormatter != null ? dateTimeFormatter.hashCode() : 0);
            return result;
        }
    }

    private final String type;
    private final CompressedString source;

    // Mutable: may be replaced by updateDefaultMapping(...) when this mapping
    // left them at their EMPTY sentinel.
    private Id id;
    private Routing routing;
    private Timestamp timestamp;

    /** Builds the metadata from a live document mapper. */
    public MappingMetaData(DocumentMapper docMapper) {
        this.type = docMapper.type();
        this.source = docMapper.mappingSource();
        this.id = new Id(docMapper.idFieldMapper().path());
        this.routing = new Routing(docMapper.routingFieldMapper().required(), docMapper.routingFieldMapper().path());
        this.timestamp = new Timestamp(docMapper.timestampFieldMapper().enabled(), docMapper.timestampFieldMapper().path(), docMapper.timestampFieldMapper().dateTimeFormatter().format());
    }

    /**
     * Builds the metadata from a compressed mapping source. The root of the
     * mapping must be a single entry whose key is the type name.
     *
     * @throws ElasticSearchIllegalStateException if there is not exactly one
     *         root key to derive the type from
     */
    public MappingMetaData(CompressedString mapping) throws IOException {
        this.source = mapping;
        Map<String, Object> mappingMap = XContentHelper.createParser(mapping.compressed(), 0, mapping.compressed().length).mapOrderedAndClose();
        if (mappingMap.size() != 1) {
            throw new ElasticSearchIllegalStateException("Can't derive type from mapping, no root type: " + mapping.string());
        }
        this.type = mappingMap.keySet().iterator().next();
        initMappers((Map<String, Object>) mappingMap.get(this.type));
    }

    /** Builds the metadata from a mapping map; the first key is the type name. */
    public MappingMetaData(Map<String, Object> mapping) throws IOException {
        this(mapping.keySet().iterator().next(), mapping);
    }

    /** Builds the metadata from a type name and a mapping map. */
    public MappingMetaData(String type, Map<String, Object> mapping) throws IOException {
        this.type = type;
        XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().map(mapping);
        this.source = new CompressedString(mappingBuilder.bytes());
        Map<String, Object> withoutType = mapping;
        // Strip the wrapping type key, if present, before reading the
        // _id/_routing/_timestamp nodes.
        if (mapping.size() == 1 && mapping.containsKey(type)) {
            withoutType = (Map<String, Object>) mapping.get(type);
        }
        initMappers(withoutType);
    }

    /**
     * Populates {@link #id}, {@link #routing} and {@link #timestamp} from the
     * (type-stripped) mapping map; absent nodes fall back to the EMPTY
     * sentinels. Key names are normalized via Strings.toUnderscoreCase.
     */
    private void initMappers(Map<String, Object> withoutType) {
        if (withoutType.containsKey("_id")) {
            String path = null;
            // NOTE(review): variable is named routingNode but holds the _id
            // node — harmless, just a misleading name.
            Map<String, Object> routingNode = (Map<String, Object>) withoutType.get("_id");
            for (Map.Entry<String, Object> entry : routingNode.entrySet()) {
                String fieldName = Strings.toUnderscoreCase(entry.getKey());
                Object fieldNode = entry.getValue();
                if (fieldName.equals("path")) {
                    path = fieldNode.toString();
                }
            }
            this.id = new Id(path);
        } else {
            this.id = Id.EMPTY;
        }
        if (withoutType.containsKey("_routing")) {
            boolean required = false;
            String path = null;
            Map<String, Object> routingNode = (Map<String, Object>) withoutType.get("_routing");
            for (Map.Entry<String, Object> entry : routingNode.entrySet()) {
                String fieldName = Strings.toUnderscoreCase(entry.getKey());
                Object fieldNode = entry.getValue();
                if (fieldName.equals("required")) {
                    required = nodeBooleanValue(fieldNode);
                } else if (fieldName.equals("path")) {
                    path = fieldNode.toString();
                }
            }
            this.routing = new Routing(required, path);
        } else {
            this.routing = Routing.EMPTY;
        }
        if (withoutType.containsKey("_timestamp")) {
            boolean enabled = false;
            String path = null;
            String format = TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT;
            Map<String, Object> timestampNode = (Map<String, Object>) withoutType.get("_timestamp");
            for (Map.Entry<String, Object> entry : timestampNode.entrySet()) {
                String fieldName = Strings.toUnderscoreCase(entry.getKey());
                Object fieldNode = entry.getValue();
                if (fieldName.equals("enabled")) {
                    enabled = nodeBooleanValue(fieldNode);
                } else if (fieldName.equals("path")) {
                    path = fieldNode.toString();
                } else if (fieldName.equals("format")) {
                    format = fieldNode.toString();
                }
            }
            this.timestamp = new Timestamp(enabled, path, format);
        } else {
            this.timestamp = Timestamp.EMPTY;
        }
    }

    /** Builds the metadata directly from its already-parsed components. */
    public MappingMetaData(String type, CompressedString source, Id id, Routing routing, Timestamp timestamp) {
        this.type = type;
        this.source = source;
        this.id = id;
        this.routing = routing;
        this.timestamp = timestamp;
    }

    /**
     * Inherits id/routing/timestamp settings from a default mapping for every
     * component this mapping left unset (identity comparison against the
     * EMPTY sentinels).
     */
    void updateDefaultMapping(MappingMetaData defaultMapping) {
        if (id == Id.EMPTY) {
            id = defaultMapping.id();
        }
        if (routing == Routing.EMPTY) {
            routing = defaultMapping.routing();
        }
        if (timestamp == Timestamp.EMPTY) {
            timestamp = defaultMapping.timestamp();
        }
    }

    public String type() {
        return this.type;
    }

    public CompressedString source() {
        return this.source;
    }

    /**
     * Converts the serialized compressed form of the mappings into a parsed map.
     */
    public Map<String, Object> sourceAsMap() throws IOException {
        Map<String, Object> mapping = XContentHelper.convertToMap(source.compressed(), 0, source.compressed().length, true).v2();
        if (mapping.size() == 1 && mapping.containsKey(type())) {
            // the type name is the root value, reduce it
            mapping = (Map<String, Object>) mapping.get(type());
        }
        return mapping;
    }

    /**
     * Converts the serialized compressed form of the mappings into a parsed map.
     */
    public Map<String, Object> getSourceAsMap() throws IOException {
        return sourceAsMap();
    }

    public Id id() {
        return this.id;
    }

    public Routing routing() {
        return this.routing;
    }

    public Timestamp timestamp() {
        return this.timestamp;
    }

    /**
     * Creates a parse context: each component only needs document parsing
     * when its value was not supplied explicitly AND a path into the
     * document is configured for it.
     */
    public ParseContext createParseContext(@Nullable String id, @Nullable String routing, @Nullable String timestamp) {
        return new ParseContext(
                id == null && id().hasPath(),
                routing == null && routing().hasPath(),
                timestamp == null && timestamp().hasPath()
        );
    }

    /**
     * Walks the document on the parser, extracting id/routing/timestamp
     * values along their configured paths into {@code parseContext}.
     */
    public void parse(XContentParser parser, ParseContext parseContext) throws IOException {
        innerParse(parser, parseContext);
    }

    // Recursive descent over the document. The context's locationId/
    // locationRouting/locationTimestamp counters track how deep along each
    // configured path the current object level is; they are incremented
    // before recursing into a matching sub-object and decremented on the way
    // back out, so the exact increment/decrement pairing is load-bearing.
    private void innerParse(XContentParser parser, ParseContext context) throws IOException {
        if (!context.parsingStillNeeded()) {
            return;
        }

        XContentParser.Token t = parser.currentToken();
        if (t == null) {
            t = parser.nextToken();
        }
        if (t == XContentParser.Token.START_OBJECT) {
            t = parser.nextToken();
        }

        // The path element each still-needed component expects at this depth.
        String idPart = context.idParsingStillNeeded() ? id().pathElements()[context.locationId] : null;
        String routingPart = context.routingParsingStillNeeded() ? routing().pathElements()[context.locationRouting] : null;
        String timestampPart = context.timestampParsingStillNeeded() ? timestamp().pathElements()[context.locationTimestamp] : null;

        for (; t == XContentParser.Token.FIELD_NAME; t = parser.nextToken()) {
            // Must point to field name
            String fieldName = parser.currentName();
            // And then the value...
            t = parser.nextToken();

            boolean incLocationId = false;
            boolean incLocationRouting = false;
            boolean incLocationTimestamp = false;

            if (context.idParsingStillNeeded() && fieldName.equals(idPart)) {
                if (context.locationId + 1 == id.pathElements().length) {
                    // Last path element: this token is the value itself.
                    context.id = parser.textOrNull();
                    context.idResolved = true;
                } else {
                    incLocationId = true;
                }
            }

            if (context.routingParsingStillNeeded() && fieldName.equals(routingPart)) {
                if (context.locationRouting + 1 == routing.pathElements().length) {
                    context.routing = parser.textOrNull();
                    context.routingResolved = true;
                } else {
                    incLocationRouting = true;
                }
            }

            if (context.timestampParsingStillNeeded() && fieldName.equals(timestampPart)) {
                if (context.locationTimestamp + 1 == timestamp.pathElements().length) {
                    context.timestamp = parser.textOrNull();
                    context.timestampResolved = true;
                } else {
                    incLocationTimestamp = true;
                }
            }

            if (incLocationId || incLocationRouting || incLocationTimestamp) {
                if (t == XContentParser.Token.START_OBJECT) {
                    context.locationId += incLocationId ? 1 : 0;
                    context.locationRouting += incLocationRouting ? 1 : 0;
                    context.locationTimestamp += incLocationTimestamp ? 1 : 0;
                    innerParse(parser, context);
                    context.locationId -= incLocationId ? 1 : 0;
                    context.locationRouting -= incLocationRouting ? 1 : 0;
                    context.locationTimestamp -= incLocationTimestamp ? 1 : 0;
                }
            } else {
                // Field is on no configured path: skip its whole subtree.
                parser.skipChildren();
            }

            if (!context.parsingStillNeeded()) {
                return;
            }
        }
    }

    /**
     * Serializes the metadata. Field order must exactly mirror
     * {@link #readFrom(StreamInput)}: type, source, id, routing, timestamp.
     * Optional paths are encoded as a presence boolean followed by the value.
     */
    public static void writeTo(MappingMetaData mappingMd, StreamOutput out) throws IOException {
        out.writeUTF(mappingMd.type());
        mappingMd.source().writeTo(out);
        // id
        if (mappingMd.id().hasPath()) {
            out.writeBoolean(true);
            out.writeUTF(mappingMd.id().path());
        } else {
            out.writeBoolean(false);
        }
        // routing
        out.writeBoolean(mappingMd.routing().required());
        if (mappingMd.routing().hasPath()) {
            out.writeBoolean(true);
            out.writeUTF(mappingMd.routing().path());
        } else {
            out.writeBoolean(false);
        }
        // timestamp
        out.writeBoolean(mappingMd.timestamp().enabled());
        if (mappingMd.timestamp().hasPath()) {
            out.writeBoolean(true);
            out.writeUTF(mappingMd.timestamp().path());
        } else {
            out.writeBoolean(false);
        }
        out.writeUTF(mappingMd.timestamp().format());
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        MappingMetaData that = (MappingMetaData) o;

        if (!id.equals(that.id)) return false;
        if (!routing.equals(that.routing)) return false;
        if (!source.equals(that.source)) return false;
        if (!timestamp.equals(that.timestamp)) return false;
        if (!type.equals(that.type)) return false;

        return true;
    }

    @Override
    public int hashCode() {
        int result = type.hashCode();
        result = 31 * result + source.hashCode();
        result = 31 * result + id.hashCode();
        result = 31 * result + routing.hashCode();
        result = 31 * result + timestamp.hashCode();
        return result;
    }

    /**
     * Deserializes metadata written by {@link #writeTo}; read order must
     * exactly mirror the write order.
     */
    public static MappingMetaData readFrom(StreamInput in) throws IOException {
        String type = in.readUTF();
        CompressedString source = CompressedString.readCompressedString(in);
        // id
        Id id = new Id(in.readBoolean() ? in.readUTF() : null);
        // routing
        Routing routing = new Routing(in.readBoolean(), in.readBoolean() ? in.readUTF() : null);
        // timestamp
        Timestamp timestamp = new Timestamp(in.readBoolean(), in.readBoolean() ? in.readUTF() : null, in.readUTF());
        return new MappingMetaData(type, source, id, routing, timestamp);
    }

    /**
     * Mutable state threaded through {@link MappingMetaData#parse}: which
     * components still need extraction, the current depth along each path,
     * and the resolved values.
     */
    public static class ParseContext {
        final boolean shouldParseId;
        final boolean shouldParseRouting;
        final boolean shouldParseTimestamp;

        // Current index into the respective pathElements array during the
        // recursive parse.
        int locationId = 0;
        int locationRouting = 0;
        int locationTimestamp = 0;

        boolean idResolved;
        boolean routingResolved;
        boolean timestampResolved;

        String id;
        String routing;
        String timestamp;

        public ParseContext(boolean shouldParseId, boolean shouldParseRouting, boolean shouldParseTimestamp) {
            this.shouldParseId = shouldParseId;
            this.shouldParseRouting = shouldParseRouting;
            this.shouldParseTimestamp = shouldParseTimestamp;
        }

        /**
         * The id value parsed, <tt>null</tt> if does not require parsing, or not resolved.
         */
        public String id() {
            return id;
        }

        /**
         * Does id parsing really needed at all?
         */
        public boolean shouldParseId() {
            return shouldParseId;
        }

        /**
         * Has id been resolved during the parsing phase.
         */
        public boolean idResolved() {
            return idResolved;
        }

        /**
         * Is id parsing still needed?
         */
        public boolean idParsingStillNeeded() {
            return shouldParseId && !idResolved;
        }

        /**
         * The routing value parsed, <tt>null</tt> if does not require parsing, or not resolved.
         */
        public String routing() {
            return routing;
        }

        /**
         * Does routing parsing really needed at all?
         */
        public boolean shouldParseRouting() {
            return shouldParseRouting;
        }

        /**
         * Has routing been resolved during the parsing phase.
         */
        public boolean routingResolved() {
            return routingResolved;
        }

        /**
         * Is routing parsing still needed?
         */
        public boolean routingParsingStillNeeded() {
            return shouldParseRouting && !routingResolved;
        }

        /**
         * The timestamp value parsed, <tt>null</tt> if does not require parsing, or not resolved.
         */
        public String timestamp() {
            return timestamp;
        }

        /**
         * Does timestamp parsing really needed at all?
         */
        public boolean shouldParseTimestamp() {
            return shouldParseTimestamp;
        }

        /**
         * Has timestamp been resolved during the parsing phase.
         */
        public boolean timestampResolved() {
            return timestampResolved;
        }

        /**
         * Is timestamp parsing still needed?
         */
        public boolean timestampParsingStillNeeded() {
            return shouldParseTimestamp && !timestampResolved;
        }

        /**
         * Do we really need parsing?
         */
        public boolean shouldParse() {
            return shouldParseId || shouldParseRouting || shouldParseTimestamp;
        }

        /**
         * Is parsing still needed?
         */
        public boolean parsingStillNeeded() {
            return idParsingStillNeeded() || routingParsingStillNeeded() || timestampParsingStillNeeded();
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package io.hops.experiments.workload.generator;

import io.hops.experiments.benchmarks.common.coin.FileSizeMultiFaceCoin;
import io.hops.experiments.benchmarks.common.config.ConfigKeys;
import io.hops.experiments.controller.Logger;
import org.apache.commons.lang.StringUtils;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.*;

/**
 * A per-thread pool of generated file and directory paths used to drive
 * benchmark workloads. Paths are created under a thread-unique base directory
 * (host name + UUID), optionally padded with synthetic components to reach a
 * configured initial tree depth. Random picks skip paths shallower than
 * {@code THRESHOLD} path separators.
 *
 * @author salman
 */
public class FileTreeGenerator implements FilePool {

  private Random rand1;
  private UUID uuid = null;
  protected List<String> allThreadFiles;
  protected List<String> allThreadDirs;
  protected String threadDir;
  private NameSpaceGenerator nameSpaceGenerator;
  // Minimum number of '/' separators a path needs before it is eligible for
  // random selection (rename/read/stat/...).
  private final int THRESHOLD = 3;
  // Index of the most recently selected file in allThreadFiles; consumed by
  // fileRenamed(...) to locate the entry to update.
  private int currIndex = -1;
  private FileSizeMultiFaceCoin fileSizeCoin;
  // Size of the file currently being written and how much of it has been
  // handed out via getFileData(...).
  private long currentFileSize = -1;
  private long currentFileDataRead = -1;

  /**
   * @param baseDir          root under which this thread's tree is created
   * @param filesPerDir      files per directory (forwarded to NameSpaceGenerator)
   * @param dirPerDir        subdirectories per directory (forwarded)
   * @param initialTreeDepth minimum depth of the thread directory; padded
   *                         with "added_depth_N" components if needed
   * @param fileDistribution file-size distribution spec; falls back to
   *                         ConfigKeys.FILE_SIZE_IN_Bytes_DEFAULT when null
   */
  public FileTreeGenerator(String baseDir, int filesPerDir, int dirPerDir,
                           int initialTreeDepth, String fileDistribution) {
    this.allThreadFiles = new ArrayList<String>(10000);
    this.allThreadDirs = new ArrayList<String>(10000);
    this.rand1 = new Random(System.currentTimeMillis());
    uuid = UUID.randomUUID();

    if (fileDistribution == null) { // return 0
      fileDistribution = ConfigKeys.FILE_SIZE_IN_Bytes_DEFAULT;
    }
    fileSizeCoin = new FileSizeMultiFaceCoin(fileDistribution);

    String machineName = "";
    try {
      machineName = InetAddress.getLocalHost().getHostName();
    } catch (UnknownHostException e) {
      // Best effort: fall back to a random pseudo-name so paths stay unique.
      machineName = "Client_Machine+" + rand1.nextInt();
    }

    baseDir = baseDir.trim();
    if (!baseDir.endsWith("/")) {
      baseDir = baseDir + "/";
    }
    if (baseDir.compareTo("/") == 0) {
      threadDir = baseDir + machineName + "_" + uuid;
    } else {
      threadDir = baseDir + machineName + "/" + uuid;
    }

    // Pad with synthetic directories until the configured initial tree depth
    // is reached (no-op when the path is already deep enough).
    String[] comp = PathUtils.getPathNames(threadDir);
    for (int i = comp.length; i < initialTreeDepth; i++) {
      threadDir += "/added_depth_" + i;
    }

    nameSpaceGenerator = new NameSpaceGenerator(threadDir, filesPerDir, dirPerDir);
  }

  /** Generates a fresh directory path and remembers it in the dir pool. */
  @Override
  public String getDirToCreate() {
    String path = nameSpaceGenerator.generateNewDirPath();
    allThreadDirs.add(path);
    return path;
  }

  /**
   * Generates a fresh file path. Not added to the pool until
   * {@link #fileCreationSucceeded(String)} confirms it.
   */
  @Override
  public String getFileToCreate() {
    String path = nameSpaceGenerator.getFileToCreate();
    return path;
  }

  /** Commits a successfully created file into the pool. */
  @Override
  public void fileCreationSucceeded(String file) {
    allThreadFiles.add(file);
  }

  @Override
  public String getFileToRead() {
    return getRandomFile();
  }

  /**
   * Picks a random file deep enough to be renamed, or null if none is found.
   * Sets {@code currIndex}, which {@link #fileRenamed} relies on.
   */
  @Override
  public String getFileToRename() {
    if (allThreadFiles.isEmpty()) {
      return null;
    }
    for (int i = 0; i < allThreadFiles.size(); i++) {
      currIndex = rand1.nextInt(allThreadFiles.size());
      String path = allThreadFiles.get(currIndex);
      if (getPathLength(path) < THRESHOLD) {
        continue;
      }
      return path;
    }
    return null;
  }

  /**
   * Records a completed rename by replacing the entry selected by the last
   * {@link #getFileToRename()} call.
   *
   * @throws IllegalStateException if {@code from} does not match the entry
   *         at {@code currIndex}
   */
  @Override
  public void fileRenamed(String from, String to) {
    String curr = allThreadFiles.get(currIndex);
    // BUG FIX: the original compared with '!=' (reference identity), which
    // would spuriously throw for an equal-but-distinct String instance.
    if (!curr.equals(from)) {
      throw new IllegalStateException("File name did not match.");
    }
    allThreadFiles.set(currIndex, to);
  }

  /**
   * Removes and returns the last file in the pool (cheap tail removal), or
   * null when the pool is empty.
   */
  @Override
  public String getFileToDelete() {
    // The original also guarded with size()>0 right after isEmpty() — the
    // second check was redundant and has been dropped.
    if (allThreadFiles.isEmpty()) {
      return null;
    }
    currIndex = allThreadFiles.size() - 1;
    return allThreadFiles.remove(currIndex);
  }

  @Override
  public String getDirToStat() {
    return getRandomDir();
  }

  @Override
  public String getFileToStat() {
    return getRandomFile();
  }

  @Override
  public String getFilePathToChangePermissions() {
    return getRandomFile();
  }

  @Override
  public String getDirPathToChangePermissions() {
    return getRandomDir();
  }

  @Override
  public String getFileToInfo() {
    return getRandomFile();
  }

  @Override
  public String getDirToInfo() {
    return getRandomDir();
  }

  @Override
  public String getFileToSetReplication() {
    return getRandomFile();
  }

  @Override
  public String getFileToAppend() {
    return getRandomFile();
  }

  @Override
  public String getFileToChown() {
    return getRandomFile();
  }

  @Override
  public String getDirToChown() {
    return getRandomDir();
  }

  /**
   * Fills {@code buffer} with zero bytes for the current file, bounded by the
   * remaining file size.
   *
   * @return the number of bytes produced, or -1 when the file is exhausted
   */
  @Override
  public long getFileData(byte[] buffer) throws IOException {
    long toRead = -1;
    if ((currentFileDataRead + buffer.length) >= currentFileSize) {
      toRead = currentFileSize - currentFileDataRead;
    } else {
      toRead = buffer.length;
    }

    if (toRead > 0) {
      // toRead <= buffer.length here, so the narrowing cast is safe.
      Arrays.fill(buffer, 0, (int) toRead, (byte) 0);
      currentFileDataRead += toRead;
      return toRead;
    } else {
      return -1;
    }
  }

  /**
   * Draws a new file size from the configured distribution and resets the
   * write progress counter.
   */
  @Override
  public long getNewFileSize() throws IOException {
    currentFileSize = fileSizeCoin.getFileSize();
    currentFileDataRead = 0;
    return currentFileSize;
  }

  @Override
  public boolean hasMoreFilesToWrite() {
    return true;
  }

  /**
   * Picks a random file deep enough (>= THRESHOLD separators), retrying up to
   * pool-size times; logs and returns null on failure. Sets currIndex.
   */
  private String getRandomFile() {
    if (!allThreadFiles.isEmpty()) {
      for (int i = 0; i < allThreadFiles.size(); i++) {
        currIndex = rand1.nextInt(allThreadFiles.size());
        String path = allThreadFiles.get(currIndex);
        if (getPathLength(path) < THRESHOLD) {
          continue;
        }
        return path;
      }
    }
    System.err.println("Unable to getRandomFile from file pool: " + this + " PoolSize is: " + allThreadFiles.size());
    Logger.printMsg("Error: Unable to getRandomFile from file pool: " + this + " PoolSize is: " + allThreadFiles.size());
    return null;
  }

  // Depth proxy: number of '/' separators in the path.
  private int getPathLength(String path) {
    return StringUtils.countMatches(path, "/");
  }

  /**
   * Picks the parent directory of a random pooled file, subject to the same
   * depth threshold; logs and returns null on failure.
   */
  public String getRandomDir() {
    if (!allThreadFiles.isEmpty()) {
      for (int i = 0; i < allThreadFiles.size(); i++) {
        currIndex = rand1.nextInt(allThreadFiles.size());
        String path = allThreadFiles.get(currIndex);
        int dirIndex = path.lastIndexOf("/");
        path = path.substring(0, dirIndex);
        if (getPathLength(path) < THRESHOLD) {
          continue;
        }
        return path;
      }
    }
    System.err.println("Unable to getRandomDir from file pool: " + this + " PoolSize is: " + allThreadFiles.size());
    Logger.printMsg("Error: Unable to getRandomDir from file pool: " + this + " PoolSize is: " + allThreadFiles.size());
    return null;
  }
}
/*
Copyright 2011 Selenium committers
Copyright 2011 Software Freedom Conservancy

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package org.openqa.selenium.testing.drivers;

import static com.google.common.base.Preconditions.checkNotNull;
import static org.openqa.selenium.Platform.LINUX;
import static org.openqa.selenium.Platform.WINDOWS;
import static org.openqa.selenium.testing.Ignore.Driver.ALL;
import static org.openqa.selenium.testing.Ignore.Driver.CHROME;
import static org.openqa.selenium.testing.Ignore.Driver.FIREFOX;
import static org.openqa.selenium.testing.Ignore.Driver.HTMLUNIT;
import static org.openqa.selenium.testing.Ignore.Driver.IE;
import static org.openqa.selenium.testing.Ignore.Driver.MARIONETTE;
import static org.openqa.selenium.testing.Ignore.Driver.OPERA;
import static org.openqa.selenium.testing.Ignore.Driver.OPERA_MOBILE;
import static org.openqa.selenium.testing.Ignore.Driver.PHANTOMJS;
import static org.openqa.selenium.testing.Ignore.Driver.QTWEBKIT;
import static org.openqa.selenium.testing.Ignore.Driver.REMOTE;
import static org.openqa.selenium.testing.Ignore.Driver.SAFARI;
import static org.openqa.selenium.testing.drivers.Browser.chrome;
import static org.openqa.selenium.testing.drivers.Browser.htmlunit;
import static org.openqa.selenium.testing.drivers.Browser.htmlunit_js;
import static org.openqa.selenium.testing.drivers.Browser.ie;
import static org.openqa.selenium.testing.drivers.Browser.opera;
import static org.openqa.selenium.testing.drivers.Browser.phantomjs;

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

import org.junit.runners.model.FrameworkMethod;
import org.openqa.selenium.Platform;
import org.openqa.selenium.testing.Ignore;
import org.openqa.selenium.testing.JavascriptEnabled;
import org.openqa.selenium.testing.NativeEventsRequired;
import org.openqa.selenium.testing.NeedsLocalEnvironment;

import java.util.Arrays;
import java.util.Set;

/**
 * Class that decides whether a test class or method should be ignored.
 */
public class TestIgnorance {

  // Browsers for which native events are unconditionally on / off.
  private final Set<Browser> alwaysNativeEvents = ImmutableSet.of(chrome, ie, opera);
  private final Set<Browser> neverNativeEvents = ImmutableSet.of(htmlunit, htmlunit_js, phantomjs);

  private final IgnoreComparator ignoreComparator = new IgnoreComparator();

  // Filters populated from system properties: run-only class names,
  // run-only method names, and explicitly skipped method names.
  private final Set<String> methods = Sets.newHashSet();
  private final Set<String> only = Sets.newHashSet();
  private final Set<String> ignoreMethods = Sets.newHashSet();

  private Browser browser;

  public TestIgnorance(Browser browser) {
    setBrowser(browser);

    addSplitProperty("only_run", only);
    addSplitProperty("method", methods);
    addSplitProperty("ignore_method", ignoreMethods);
  }

  // Reads a comma-separated system property into the given set; absent
  // properties contribute nothing.
  private static void addSplitProperty(String propertyName, Set<String> sink) {
    String value = System.getProperty(propertyName);
    if (value != null) {
      sink.addAll(Arrays.asList(value.split(",")));
    }
  }

  // JUnit 4
  public boolean isIgnored(FrameworkMethod method, Object test) {
    Ignore classLevel = test.getClass().getAnnotation(Ignore.class);
    Ignore methodLevel = method.getMethod().getAnnotation(Ignore.class);

    boolean ignored = ignoreComparator.shouldIgnore(classLevel)
                      || ignoreComparator.shouldIgnore(methodLevel);

    ignored |= isIgnoredBecauseOfJUnit4Ignore(test.getClass().getAnnotation(org.junit.Ignore.class));
    ignored |= isIgnoredBecauseOfJUnit4Ignore(method.getMethod().getAnnotation(org.junit.Ignore.class));

    // "ignored_only" flips the verdict at this point so that only the
    // normally-ignored tests are executed; the environment/capability checks
    // below still apply afterwards.
    if (Boolean.getBoolean("ignored_only")) {
      ignored = !ignored;
    }

    ignored |= isIgnoredDueToJavascript(test.getClass().getAnnotation(JavascriptEnabled.class));
    ignored |= isIgnoredDueToJavascript(method.getMethod().getAnnotation(JavascriptEnabled.class));
    ignored |= isIgnoredBecauseOfNativeEvents(test.getClass().getAnnotation(NativeEventsRequired.class));
    ignored |= isIgnoredBecauseOfNativeEvents(method.getMethod().getAnnotation(NativeEventsRequired.class));
    ignored |= isIgnoredDueToEnvironmentVariables(method, test);
    ignored |= isIgnoredDueToBeingOnSauce(method, test);

    return ignored;
  }

  private boolean isIgnoredBecauseOfJUnit4Ignore(org.junit.Ignore annotation) {
    return annotation != null;
  }

  private boolean isIgnoredBecauseOfNativeEvents(NativeEventsRequired annotation) {
    if (annotation == null) {
      return false;
    }
    if (neverNativeEvents.contains(browser)) {
      return true;
    }
    if (alwaysNativeEvents.contains(browser)) {
      return false;
    }
    if (!Boolean.getBoolean("selenium.browser.native_events")) {
      return true;
    }

    // We only have native events on Linux and Windows.
    Platform platform = getEffectivePlatform();
    boolean supportedPlatform = platform.is(LINUX) || platform.is(WINDOWS);
    return !supportedPlatform;
  }

  private static Platform getEffectivePlatform() {
    return SauceDriver.shouldUseSauce()
           ? SauceDriver.getEffectivePlatform()
           : Platform.getCurrent();
  }

  private boolean isIgnoredDueToBeingOnSauce(FrameworkMethod method, Object test) {
    if (!SauceDriver.shouldUseSauce()) {
      return false;
    }
    boolean methodNeedsLocal =
        method.getMethod().getAnnotation(NeedsLocalEnvironment.class) != null;
    boolean classNeedsLocal =
        test.getClass().getAnnotation(NeedsLocalEnvironment.class) != null;
    return methodNeedsLocal || classNeedsLocal;
  }

  private boolean isIgnoredDueToJavascript(JavascriptEnabled enabled) {
    if (enabled == null) {
      return false;
    }
    return !browser.isJavascriptEnabled();
  }

  private boolean isIgnoredDueToEnvironmentVariables(FrameworkMethod method, Object test) {
    if (!only.isEmpty() && !only.contains(test.getClass().getSimpleName())) {
      return true;
    }
    if (!methods.isEmpty() && !methods.contains(method.getName())) {
      return true;
    }
    return ignoreMethods.contains(method.getName());
  }

  public void setBrowser(Browser browser) {
    this.browser = checkNotNull(browser, "Browser to use must be set");
    addIgnoresForBrowser(browser, ignoreComparator);
  }

  private void addIgnoresForBrowser(Browser browser, IgnoreComparator comparator) {
    if (Boolean.getBoolean("selenium.browser.remote") || SauceDriver.shouldUseSauce()) {
      comparator.addDriver(REMOTE);
    }

    switch (browser) {
      case chrome:
        comparator.addDriver(CHROME);
        break;

      case ff:
        // Marionette and the legacy driver are mutually exclusive ignores.
        if (Boolean.getBoolean("webdriver.firefox.marionette")) {
          comparator.addDriver(MARIONETTE);
        } else {
          comparator.addDriver(FIREFOX);
        }
        break;

      case htmlunit:
      case htmlunit_js:
        comparator.addDriver(HTMLUNIT);
        break;

      case ie:
        comparator.addDriver(IE);
        break;

      case none:
        comparator.addDriver(ALL);
        break;

      case opera:
        comparator.addDriver(OPERA);
        break;

      case opera_mobile:
        comparator.addDriver(OPERA_MOBILE);
        comparator.addDriver(REMOTE);
        break;

      case phantomjs:
        comparator.addDriver(PHANTOMJS);
        break;

      case safari:
        comparator.addDriver(SAFARI);
        break;

      case qtwebkit:
        comparator.addDriver(QTWEBKIT);
        break;

      default:
        throw new RuntimeException("Cannot determine which ignore to add ignores rules for");
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.flink; import static org.apache.beam.sdk.testing.RegexMatcher.matches; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.core.Every.everyItem; import static org.junit.Assert.assertThrows; import static org.junit.Assert.fail; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.PrintStream; import java.io.Serializable; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; import org.apache.beam.runners.core.construction.PTransformMatchers; import org.apache.beam.runners.core.construction.PTransformTranslation; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.GenerateSequence; import org.apache.beam.sdk.io.TextIO; import org.apache.beam.sdk.options.PipelineOptionsFactory; import 
org.apache.beam.sdk.runners.PTransformOverride; import org.apache.beam.sdk.runners.PTransformOverrideFactory; import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.windowing.FixedWindows; import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Charsets; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList; import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.api.java.RemoteEnvironment; import org.apache.flink.streaming.api.environment.RemoteStreamEnvironment; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.hamcrest.BaseMatcher; import org.hamcrest.Description; import org.hamcrest.Matchers; import org.joda.time.Duration; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import org.powermock.reflect.Whitebox; /** Tests for {@link FlinkPipelineExecutionEnvironment}. 
*/ @RunWith(JUnit4.class) public class FlinkPipelineExecutionEnvironmentTest implements Serializable { @Rule public transient TemporaryFolder tmpFolder = new TemporaryFolder(); @Test public void shouldRecognizeAndTranslateStreamingPipeline() { FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class); options.setRunner(TestFlinkRunner.class); options.setFlinkMaster("[auto]"); FlinkPipelineExecutionEnvironment flinkEnv = new FlinkPipelineExecutionEnvironment(options); Pipeline pipeline = Pipeline.create(); pipeline .apply(GenerateSequence.from(0).withRate(1, Duration.standardSeconds(1))) .apply( ParDo.of( new DoFn<Long, String>() { @ProcessElement public void processElement(ProcessContext c) throws Exception { c.output(Long.toString(c.element())); } })) .apply(Window.into(FixedWindows.of(Duration.standardHours(1)))) .apply(TextIO.write().withNumShards(1).withWindowedWrites().to("/dummy/path")); flinkEnv.translate(pipeline); // no exception should be thrown } @Test public void shouldPrepareFilesToStageWhenFlinkMasterIsSetExplicitly() throws IOException { FlinkPipelineOptions options = testPreparingResourcesToStage("localhost:8081", false); assertThat(options.getFilesToStage().size(), is(2)); assertThat(options.getFilesToStage().get(0), matches(".*\\.jar")); } @Test public void shouldFailWhenFileDoesNotExistAndFlinkMasterIsSetExplicitly() { assertThrows( "To-be-staged file does not exist: ", IllegalStateException.class, () -> testPreparingResourcesToStage("localhost:8081", true)); } @Test public void shouldNotPrepareFilesToStageWhenFlinkMasterIsSetToAuto() throws IOException { FlinkPipelineOptions options = testPreparingResourcesToStage("[auto]"); assertThat(options.getFilesToStage().size(), is(3)); assertThat(options.getFilesToStage(), everyItem(not(matches(".*\\.jar")))); } @Test public void shouldNotPrepareFilesToStagewhenFlinkMasterIsSetToCollection() throws IOException { FlinkPipelineOptions options = 
testPreparingResourcesToStage("[collection]"); assertThat(options.getFilesToStage().size(), is(3)); assertThat(options.getFilesToStage(), everyItem(not(matches(".*\\.jar")))); } @Test public void shouldNotPrepareFilesToStageWhenFlinkMasterIsSetToLocal() throws IOException { FlinkPipelineOptions options = testPreparingResourcesToStage("[local]"); assertThat(options.getFilesToStage().size(), is(3)); assertThat(options.getFilesToStage(), everyItem(not(matches(".*\\.jar")))); } @Test public void shouldUseDefaultTempLocationIfNoneSet() { FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class); options.setRunner(TestFlinkRunner.class); options.setFlinkMaster("clusterAddress"); FlinkPipelineExecutionEnvironment flinkEnv = new FlinkPipelineExecutionEnvironment(options); Pipeline pipeline = Pipeline.create(options); flinkEnv.translate(pipeline); String defaultTmpDir = System.getProperty("java.io.tmpdir"); assertThat(options.getFilesToStage(), hasItem(startsWith(defaultTmpDir))); } @Test public void shouldUsePreparedFilesOnRemoteEnvironment() throws Exception { FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class); options.setRunner(TestFlinkRunner.class); options.setFlinkMaster("clusterAddress"); FlinkPipelineExecutionEnvironment flinkEnv = new FlinkPipelineExecutionEnvironment(options); Pipeline pipeline = Pipeline.create(options); flinkEnv.translate(pipeline); ExecutionEnvironment executionEnvironment = flinkEnv.getBatchExecutionEnvironment(); assertThat(executionEnvironment, instanceOf(RemoteEnvironment.class)); @SuppressWarnings("unchecked") List<URL> jarFiles = (List<URL>) Whitebox.getInternalState(executionEnvironment, "jarFiles"); List<URL> urlConvertedStagedFiles = convertFilesToURLs(options.getFilesToStage()); assertThat(jarFiles, is(urlConvertedStagedFiles)); } @Test public void shouldUsePreparedFilesOnRemoteStreamEnvironment() throws Exception { FlinkPipelineOptions options = 
PipelineOptionsFactory.as(FlinkPipelineOptions.class); options.setRunner(TestFlinkRunner.class); options.setFlinkMaster("clusterAddress"); options.setStreaming(true); FlinkPipelineExecutionEnvironment flinkEnv = new FlinkPipelineExecutionEnvironment(options); Pipeline pipeline = Pipeline.create(options); flinkEnv.translate(pipeline); StreamExecutionEnvironment streamExecutionEnvironment = flinkEnv.getStreamExecutionEnvironment(); assertThat(streamExecutionEnvironment, instanceOf(RemoteStreamEnvironment.class)); @SuppressWarnings("unchecked") List<URL> jarFiles = (List<URL>) Whitebox.getInternalState(streamExecutionEnvironment, "jarFiles"); List<URL> urlConvertedStagedFiles = convertFilesToURLs(options.getFilesToStage()); assertThat(jarFiles, is(urlConvertedStagedFiles)); } @Test public void shouldUseTransformOverrides() { boolean[] testParameters = {true, false}; for (boolean streaming : testParameters) { FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class); options.setStreaming(streaming); options.setRunner(FlinkRunner.class); FlinkPipelineExecutionEnvironment flinkEnv = new FlinkPipelineExecutionEnvironment(options); Pipeline p = Mockito.spy(Pipeline.create(options)); flinkEnv.translate(p); ArgumentCaptor<ImmutableList> captor = ArgumentCaptor.forClass(ImmutableList.class); Mockito.verify(p).replaceAll(captor.capture()); ImmutableList<PTransformOverride> overridesList = captor.getValue(); assertThat(overridesList.isEmpty(), is(false)); assertThat( overridesList.size(), is(FlinkTransformOverrides.getDefaultOverrides(options).size())); } } @Test public void shouldProvideParallelismToTransformOverrides() { FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class); options.setStreaming(true); options.setRunner(FlinkRunner.class); FlinkPipelineExecutionEnvironment flinkEnv = new FlinkPipelineExecutionEnvironment(options); Pipeline p = Pipeline.create(options); // Create a transform applicable for 
PTransformMatchers.writeWithRunnerDeterminedSharding() // which requires parallelism p.apply(Create.of("test")).apply(TextIO.write().to("/tmp")); p = Mockito.spy(p); // If this succeeds we're ok flinkEnv.translate(p); // Verify we were using desired replacement transform ArgumentCaptor<ImmutableList> captor = ArgumentCaptor.forClass(ImmutableList.class); Mockito.verify(p).replaceAll(captor.capture()); ImmutableList<PTransformOverride> overridesList = captor.getValue(); assertThat( overridesList, hasItem( new BaseMatcher<PTransformOverride>() { @Override public void describeTo(Description description) {} @Override public boolean matches(Object actual) { if (actual instanceof PTransformOverride) { PTransformOverrideFactory overrideFactory = ((PTransformOverride) actual).getOverrideFactory(); if (overrideFactory instanceof FlinkStreamingPipelineTranslator.StreamingShardedWriteFactory) { FlinkStreamingPipelineTranslator.StreamingShardedWriteFactory factory = (FlinkStreamingPipelineTranslator.StreamingShardedWriteFactory) overrideFactory; return factory.options.getParallelism() > 0; } } return false; } })); } @Test public void shouldUseStreamingTransformOverridesWithUnboundedSources() { FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class); // no explicit streaming mode set options.setRunner(FlinkRunner.class); FlinkPipelineExecutionEnvironment flinkEnv = new FlinkPipelineExecutionEnvironment(options); Pipeline p = Mockito.spy(Pipeline.create(options)); // Add unbounded source which will set the streaming mode to true p.apply(GenerateSequence.from(0)); flinkEnv.translate(p); ArgumentCaptor<ImmutableList> captor = ArgumentCaptor.forClass(ImmutableList.class); Mockito.verify(p).replaceAll(captor.capture()); ImmutableList<PTransformOverride> overridesList = captor.getValue(); assertThat( overridesList, hasItem( PTransformOverride.of( PTransformMatchers.urnEqualTo(PTransformTranslation.CREATE_VIEW_TRANSFORM_URN), 
CreateStreamingFlinkView.Factory.INSTANCE))); } @Test public void testTranslationModeOverrideWithUnboundedSources() { FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class); options.setRunner(FlinkRunner.class); options.setStreaming(false); FlinkPipelineExecutionEnvironment flinkEnv = new FlinkPipelineExecutionEnvironment(options); Pipeline pipeline = Pipeline.create(options); pipeline.apply(GenerateSequence.from(0)); flinkEnv.translate(pipeline); assertThat(options.isStreaming(), Matchers.is(true)); } @Test public void testTranslationModeNoOverrideWithoutUnboundedSources() { boolean[] testArgs = new boolean[] {true, false}; for (boolean streaming : testArgs) { FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class); options.setRunner(FlinkRunner.class); options.setStreaming(streaming); FlinkPipelineExecutionEnvironment flinkEnv = new FlinkPipelineExecutionEnvironment(options); Pipeline pipeline = Pipeline.create(options); pipeline.apply(GenerateSequence.from(0).to(10)); flinkEnv.translate(pipeline); assertThat(options.isStreaming(), Matchers.is(streaming)); } } @Test public void shouldLogWarningWhenCheckpointingIsDisabled() { Pipeline pipeline = Pipeline.create(); pipeline.getOptions().setRunner(TestFlinkRunner.class); pipeline // Add an UnboundedSource to check for the warning if checkpointing is disabled .apply(GenerateSequence.from(0)) .apply( ParDo.of( new DoFn<Long, Void>() { @ProcessElement public void processElement(ProcessContext ctx) { throw new RuntimeException("Failing here is ok."); } })); final PrintStream oldErr = System.err; ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); PrintStream replacementStdErr = new PrintStream(byteArrayOutputStream); try { System.setErr(replacementStdErr); // Run pipeline and fail during execution pipeline.run(); fail("Should have failed"); } catch (Exception e) { // We want to fail here } finally { System.setErr(oldErr); } 
replacementStdErr.flush(); assertThat( new String(byteArrayOutputStream.toByteArray(), Charsets.UTF_8), containsString( "UnboundedSources present which rely on checkpointing, but checkpointing is disabled.")); } private FlinkPipelineOptions testPreparingResourcesToStage(String flinkMaster) throws IOException { return testPreparingResourcesToStage(flinkMaster, true); } private FlinkPipelineOptions testPreparingResourcesToStage( String flinkMaster, boolean includeNonExisting) throws IOException { Pipeline pipeline = Pipeline.create(); String tempLocation = tmpFolder.newFolder().getAbsolutePath(); List<String> filesToStage = new ArrayList<>(); File stagingDir = tmpFolder.newFolder(); stagingDir.createNewFile(); filesToStage.add(stagingDir.getAbsolutePath()); File individualStagingFile = tmpFolder.newFile(); filesToStage.add(individualStagingFile.getAbsolutePath()); if (includeNonExisting) { filesToStage.add("/path/to/not/existing/dir"); } FlinkPipelineOptions options = setPipelineOptions(flinkMaster, tempLocation, filesToStage); FlinkPipelineExecutionEnvironment flinkEnv = new FlinkPipelineExecutionEnvironment(options); flinkEnv.translate(pipeline); return options; } private FlinkPipelineOptions setPipelineOptions( String flinkMaster, String tempLocation, List<String> filesToStage) { FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class); options.setRunner(TestFlinkRunner.class); options.setFlinkMaster(flinkMaster); options.setTempLocation(tempLocation); options.setFilesToStage(filesToStage); return options; } private static List<URL> convertFilesToURLs(List<String> filePaths) { return filePaths.stream() .map( file -> { try { return new File(file).getAbsoluteFile().toURI().toURL(); } catch (MalformedURLException e) { throw new RuntimeException("Failed to convert to URL", e); } }) .collect(Collectors.toList()); } }
/* * Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.andes.internal; import com.hazelcast.core.HazelcastInstance; import org.apache.axis2.clustering.ClusteringAgent; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.osgi.framework.BundleContext; import org.osgi.framework.ServiceRegistration; import org.osgi.service.component.ComponentContext; import org.wso2.andes.configuration.AndesConfigurationManager; import org.wso2.andes.configuration.enums.AndesConfiguration; import org.wso2.andes.kernel.AndesContext; import org.wso2.andes.kernel.AndesException; import org.wso2.andes.kernel.AndesKernelBoot; import org.wso2.andes.server.BrokerOptions; import org.wso2.andes.server.Main; import org.wso2.andes.server.cluster.coordination.hazelcast.HazelcastAgent; import org.wso2.andes.server.registry.ApplicationRegistry; import org.wso2.andes.wso2.service.QpidNotificationService; import org.wso2.carbon.andes.authentication.service.AuthenticationService; import org.wso2.carbon.andes.event.core.EventBundleNotificationService; import org.wso2.carbon.andes.event.core.qpid.QpidServerDetails; import org.wso2.carbon.andes.listeners.BrokerLifecycleListener; import org.wso2.carbon.andes.listeners.MessageBrokerTenantManagementListener; import org.wso2.carbon.andes.service.QpidService; import 
org.wso2.carbon.andes.service.QpidServiceImpl; import org.wso2.carbon.andes.service.exception.ConfigurationException; import org.wso2.carbon.andes.utils.MessageBrokerDBUtil; import org.wso2.carbon.base.ServerConfiguration; import org.wso2.carbon.base.api.ServerConfigurationService; import org.wso2.carbon.core.ServerRestartHandler; import org.wso2.carbon.core.ServerShutdownHandler; import org.wso2.carbon.server.admin.common.IServerAdmin; import org.wso2.carbon.stratos.common.listeners.TenantMgtListener; import org.wso2.carbon.utils.ConfigurationContextService; import java.io.IOException; import java.lang.management.ManagementFactory; import java.net.InetAddress; import java.net.Socket; import java.util.Set; import java.util.Stack; import javax.management.MBeanServer; import javax.management.MalformedObjectNameException; import javax.management.ObjectName; import org.osgi.service.component.annotations.Activate; import org.osgi.service.component.annotations.Component; import org.osgi.service.component.annotations.Deactivate; import org.osgi.service.component.annotations.Reference; import org.osgi.service.component.annotations.ReferenceCardinality; import org.osgi.service.component.annotations.ReferencePolicy; @Component( name = "org.wso2.carbon.andes.internal.QpidServiceComponent", immediate = true) public class QpidServiceComponent { private static final Log log = LogFactory.getLog(QpidServiceComponent.class); private static final String CARBON_CONFIG_PORT_OFFSET = "Ports.Offset"; private static final int CARBON_DEFAULT_PORT_OFFSET = 0; protected static final String MODE_STANDALONE = "standalone"; protected static final String MODE_DEFAULT = "default"; private static BundleContext bundleContext; private static Stack<ServiceRegistration> registrations = new Stack<ServiceRegistration>(); /** * This is used in the situations where the Hazelcast instance is not registered but the activate method of the * QpidServiceComponent is called when clustering is enabled. 
* This property is used to block the process of starting the broker until the hazelcast instance getting * registered. */ private boolean brokerShouldBeStarted = false; /** * This flag true if HazelcastInstance has been registered. */ private boolean registeredHazelcast = false; /** * This holds the configuration values */ private QpidServiceImpl qpidServiceImpl; @Activate protected void activate(ComponentContext context) throws AndesException { try { // Initialize AndesConfigurationManager AndesConfigurationManager.initialize(readPortOffset()); // Load qpid specific configurations qpidServiceImpl = new QpidServiceImpl(QpidServiceDataHolder.getInstance().getAccessKey()); qpidServiceImpl.loadConfigurations(); // Register tenant management listener for Message Broker bundleContext = context.getBundleContext(); MessageBrokerTenantManagementListener tenantManagementListener = new MessageBrokerTenantManagementListener(); registrations.push(bundleContext.registerService(TenantMgtListener.class.getName(), tenantManagementListener, null)); // set message store and andes context store related configurations AndesContext.getInstance().constructStoreConfiguration(); // Read deployment mode String mode = AndesConfigurationManager.readValue(AndesConfiguration.DEPLOYMENT_MODE); // Start broker in standalone mode if (mode.equalsIgnoreCase(MODE_STANDALONE)) { // set clustering enabled to false because even though clustering enabled in axis2.xml, we are not // going to consider it in standalone mode AndesContext.getInstance().setClusteringEnabled(false); this.startAndesBroker(); } else if (mode.equalsIgnoreCase(MODE_DEFAULT)) { // Start broker in HA mode if (!AndesContext.getInstance().isClusteringEnabled()) { // If clustering is disabled, broker starts without waiting for hazelcastInstance this.startAndesBroker(); } else { // Start broker in distributed mode if (registeredHazelcast) { // When clustering is enabled, starts broker only if the hazelcastInstance has also been // 
registered. this.startAndesBroker(); } else { // If hazelcastInstance has not been registered yet, turn the brokerShouldBeStarted flag to // true and wait for hazelcastInstance to be registered. this.brokerShouldBeStarted = true; } } } else { throw new ConfigurationException("Invalid value " + mode + " for deployment/mode in broker.xml"); } MBShutdownHandler mbShutdownHandler = new MBShutdownHandler(); registrations.push(bundleContext.registerService(ServerShutdownHandler.class.getName(), mbShutdownHandler, null)); registrations.push(bundleContext.registerService(ServerRestartHandler.class.getName(), mbShutdownHandler, null)); } catch (ConfigurationException e) { log.error("Invalid configuration found in a configuration file", e); this.shutdown(); } } @Deactivate protected void deactivate(ComponentContext ctx) { // Unregister services while (!registrations.empty()) { registrations.pop().unregister(); } bundleContext = null; } @Reference( name = "org.wso2.carbon.andes.authentication.service.AuthenticationService", service = org.wso2.carbon.andes.authentication.service.AuthenticationService.class, cardinality = ReferenceCardinality.MANDATORY, policy = ReferencePolicy.DYNAMIC, unbind = "unsetAccessKey") protected void setAccessKey(AuthenticationService authenticationService) { QpidServiceDataHolder.getInstance().setAccessKey(authenticationService.getAccessKey()); } protected void unsetAccessKey(AuthenticationService authenticationService) { QpidServiceDataHolder.getInstance().setAccessKey(null); } @Reference( name = "org.wso2.andes.wso2.service.QpidNotificationService", service = org.wso2.andes.wso2.service.QpidNotificationService.class, cardinality = ReferenceCardinality.MANDATORY, policy = ReferencePolicy.DYNAMIC, unbind = "unsetQpidNotificationService") protected void setQpidNotificationService(QpidNotificationService qpidNotificationService) { // Qpid broker should not start until Qpid bundle is activated. 
// QpidNotificationService informs that the Qpid bundle has started. } protected void unsetQpidNotificationService(QpidNotificationService qpidNotificationService) { } @Reference( name = "server.configuration", service = org.wso2.carbon.base.api.ServerConfigurationService.class, cardinality = ReferenceCardinality.MANDATORY, policy = ReferencePolicy.DYNAMIC, unbind = "unsetServerConfiguration") protected void setServerConfiguration(ServerConfigurationService serverConfiguration) { QpidServiceDataHolder.getInstance().setCarbonConfiguration(serverConfiguration); } protected void unsetServerConfiguration(ServerConfigurationService serverConfiguration) { QpidServiceDataHolder.getInstance().setCarbonConfiguration(null); } @Reference( name = "event.broker", service = org.wso2.carbon.andes.event.core.EventBundleNotificationService.class, cardinality = ReferenceCardinality.MANDATORY, policy = ReferencePolicy.DYNAMIC, unbind = "unsetEventBundleNotificationService") protected void setEventBundleNotificationService(EventBundleNotificationService eventBundleNotificationService) { QpidServiceDataHolder.getInstance().registerEventBundleNotificationService(eventBundleNotificationService); } protected void unsetEventBundleNotificationService(EventBundleNotificationService eventBundleNotificationService) { // unsetting } /** * Access Hazelcast Instance, which is exposed as an OSGI service. 
* * @param hazelcastInstance hazelcastInstance found from the OSGI service */ @Reference( name = "hazelcast.instance.service", service = com.hazelcast.core.HazelcastInstance.class, cardinality = ReferenceCardinality.OPTIONAL, policy = ReferencePolicy.DYNAMIC, unbind = "unsetHazelcastInstance") protected void setHazelcastInstance(HazelcastInstance hazelcastInstance) throws AndesException { HazelcastAgent.getInstance().init(hazelcastInstance); registeredHazelcast = true; if (brokerShouldBeStarted) { // getting registered try { this.startAndesBroker(); } catch (ConfigurationException e) { log.error("Invalid configuration found in a configuration file", e); this.shutdown(); } } } protected void unsetHazelcastInstance(HazelcastInstance hazelcastInstance) { } /** * Access ConfigurationContextService, which is exposed as an OSGI service, to read cluster configuration. * * @param configurationContextService ConfigurationContextService from the OSGI service */ @Reference( name = "config.context.service", service = org.wso2.carbon.utils.ConfigurationContextService.class, cardinality = ReferenceCardinality.MANDATORY, policy = ReferencePolicy.DYNAMIC, unbind = "unsetConfigurationContextService") protected void setConfigurationContextService(ConfigurationContextService configurationContextService) { ClusteringAgent agent = configurationContextService.getServerConfigContext().getAxisConfiguration() .getClusteringAgent(); AndesContext.getInstance().setClusteringEnabled(agent != null); } protected void unsetConfigurationContextService(ConfigurationContextService configurationContextService) { // Do nothing } /** * Access IServerAdmin, which is exposed as an OSGi service, to call the graceful shutdown method in the carbon * kernel. 
*/ @Reference( name = "org.wso2.carbon.server.admin.common.IServerAdmin", service = org.wso2.carbon.server.admin.common.IServerAdmin.class, cardinality = ReferenceCardinality.MANDATORY, policy = ReferencePolicy.DYNAMIC, unbind = "unsetIServerAdmin") protected void setIServerAdmin(IServerAdmin iServerAdmin) { QpidServiceDataHolder.getInstance().setService(iServerAdmin); } /** * Unset IServerAdmin OSGi service */ protected void unsetIServerAdmin(IServerAdmin iServerAdmin) { QpidServiceDataHolder.getInstance().setService(null); } /** * Shutdown from the carbon kernel level. */ private void shutdown() throws AndesException { // Calling carbon kernel shutdown method, inside the ServerAdmin component try { QpidServiceDataHolder.getInstance().getService().shutdownGracefully(); } catch (Exception e) { log.error("Error occurred while shutting down", e); throw new AndesException("Error occurred while shutting down", e); } } /** * Check if the broker is up and running * * @return true if the broker is running or false otherwise */ private boolean isBrokerRunning() { boolean response = false; try { MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer(); Set<ObjectName> set = mBeanServer.queryNames(new ObjectName("org.wso2.andes:type=VirtualHost" + ".VirtualHostManager,*"), null); if (set.size() > 0) { // Virtual hosts created, hence broker running. response = true; } } catch (MalformedObjectNameException e) { log.error("Error checking if broker is running.", e); } return response; } private int readPortOffset() { ServerConfiguration carbonConfig = ServerConfiguration.getInstance(); String portOffset = System.getProperty("portOffset", carbonConfig.getFirstProperty(CARBON_CONFIG_PORT_OFFSET)); try { return ((portOffset != null) ? 
Integer.parseInt(portOffset.trim()) : CARBON_DEFAULT_PORT_OFFSET); } catch (NumberFormatException e) { return CARBON_DEFAULT_PORT_OFFSET; } } /** * This applies the bindAddress from broker.xml instead of the hostname from carbon.xml within MB. * * @return host name as derived from broker.xml */ private String getTransportBindAddress() { return AndesConfigurationManager.readValue(AndesConfiguration.TRANSPORTS_BIND_ADDRESS); } /** * This applies the MQTTbindAddress from broker.xml instead of the hostname from carbon.xml within MB. * * @return host name as derived from broker.xml */ private String getMQTTTransportBindAddress() { return AndesConfigurationManager.readValue(AndesConfiguration.TRANSPORTS_MQTT_BIND_ADDRESS); } /** * This applies the AMQPbindAddress from broker.xml instead of the hostname from carbon.xml within MB. * * @return host name as derived from broker.xml */ private String getAMQPTransportBindAddress() { return AndesConfigurationManager.readValue(AndesConfiguration.TRANSPORTS_AMQP_BIND_ADDRESS); } /** * Start Andes Broker and related components with given configurations. 
 *
     * @throws ConfigurationException
     * @throws AndesException
     */
    private void startAndesBroker() throws ConfigurationException, AndesException {
        // Startup is strictly ordered: DB setup (optional) -> Qpid Main -> hook removal -> wait -> transports.
        brokerShouldBeStarted = false;
        String dSetupValue = System.getProperty("setup");
        if (dSetupValue != null) {
            // Source MB rdbms database if data source configurations and supported sql exist
            MessageBrokerDBUtil messageBrokerDBUtil = new MessageBrokerDBUtil();
            messageBrokerDBUtil.initialize();
        }
        // Start andes broker
        log.info("Activating Andes Message Broker Engine...");
        System.setProperty(BrokerOptions.ANDES_HOME, qpidServiceImpl.getQpidHome());
        // Qpid Main expects AMQP, AMQP-SSL and MQTT ports as command-line style flags.
        String[] args = {"-p" + qpidServiceImpl.getAMQPPort(), "-s" + qpidServiceImpl.getAMQPSSLPort(),
                "-q" + qpidServiceImpl.getMqttPort()};
        // TODO: Change the functionality in andes main method to an API
        // Main.setStandaloneMode(false);
        Main.main(args);
        // Remove Qpid shutdown hook so that I have control over shutting the broker down
        Runtime.getRuntime().removeShutdownHook(ApplicationRegistry.getShutdownHook());
        // Wait until the broker has started
        while (!isBrokerRunning()) {
            try {
                Thread.sleep(500);
            } catch (InterruptedException ignore) {
                // ignore
                // NOTE(review): the interrupt is swallowed, so this wait cannot be cancelled and the
                // thread's interrupted status is lost — consider Thread.currentThread().interrupt();
                // confirm desired shutdown behaviour before changing.
            }
        }
        // TODO: Have to re-structure how andes broker getting started.
        // there should be a separate andes-core component to initialize Andes Broker. Within
        // that component both Qpid and MQTT components should initialized.
        // Start AMQP server with given configurations
        startAMQPServer();
        // Start MQTT Server with given configurations
        startMQTTServer();
        // Message broker is started with both AMQP and MQTT
        log.info("WSO2 Message Broker is started.");
        // Publish Qpid properties
        registrations.push(bundleContext.registerService(QpidService.class.getName(), qpidServiceImpl, null));
        // Advertise whichever AMQP port is actually in use (SSL-only deployments expose only the SSL port).
        Integer brokerPort;
        if (qpidServiceImpl.getIfSSLOnly()) {
            brokerPort = qpidServiceImpl.getAMQPSSLPort();
        } else {
            brokerPort = qpidServiceImpl.getAMQPPort();
        }
        QpidServerDetails qpidServerDetails =
                new QpidServerDetails(qpidServiceImpl.getAccessKey(),
                        qpidServiceImpl.getClientID(),
                        qpidServiceImpl.getVirtualHostName(),
                        qpidServiceImpl.getHostname(),
                        brokerPort.toString(), qpidServiceImpl.getIfSSLOnly());
        // Notify registered listeners that the broker endpoint is available.
        QpidServiceDataHolder.getInstance().getEventBundleNotificationService().notifyStart(qpidServerDetails);
    }

    /**
     * check whether the tcp port has started. some times the server started thread may return
     * before Qpid server actually bind to the tcp port. in that case there are some connection
     * time out issues.
* * @throws ConfigurationException */ private void startAMQPServer() throws ConfigurationException { boolean isServerStarted = false; int port; if (qpidServiceImpl.getIfSSLOnly()) { port = qpidServiceImpl.getAMQPSSLPort(); } else { port = qpidServiceImpl.getAMQPPort(); } if (AndesConfigurationManager.<Boolean>readValue(AndesConfiguration.TRANSPORTS_AMQP_ENABLED)) { while (!isServerStarted) { Socket socket = null; try { InetAddress address = InetAddress.getByName(getAMQPTransportBindAddress()); socket = new Socket(address, port); log.info("AMQP Host Address : " + address.getHostAddress() + " Port : " + port); isServerStarted = socket.isConnected(); if (isServerStarted) { log.info("Successfully connected to AMQP server " + "on port " + port); } } catch (IOException e) { log.error("Wait until Qpid server starts on port " + port, e); try { Thread.sleep(500); } catch (InterruptedException ignore) { // Ignore } } finally { try { if ((socket != null) && (socket.isConnected())) { socket.close(); } } catch (IOException e) { log.error("Can not close the socket which is used to check the server " + "status ", e); } } } } else { log.warn("AMQP Transport is disabled as per configuration."); } } /** * check whether the tcp port has started. some times the server started thread may return * before MQTT server actually bind to the tcp port. in that case there are some connection * time out issues. 
* * @throws ConfigurationException */ private void startMQTTServer() throws ConfigurationException { boolean isServerStarted = false; int port; if (qpidServiceImpl.getMQTTSSLOnly()) { port = qpidServiceImpl.getMqttSSLPort(); } else { port = qpidServiceImpl.getMqttPort(); } if (AndesConfigurationManager.<Boolean>readValue(AndesConfiguration.TRANSPORTS_MQTT_ENABLED)) { while (!isServerStarted) { Socket socket = null; try { InetAddress address = InetAddress.getByName(getMQTTTransportBindAddress()); socket = new Socket(address, port); log.info("MQTT Host Address : " + address.getHostAddress() + " Port : " + port); isServerStarted = socket.isConnected(); if (isServerStarted) { log.info("Successfully connected to MQTT server on port " + port); } } catch (IOException e) { log.error("Wait until server starts on port " + port, e); try { Thread.sleep(500); } catch (InterruptedException ignore) { // Ignore } } finally { try { if ((socket != null) && (socket.isConnected())) { socket.close(); } } catch (IOException e) { log.error("Can not close the socket which is used to check the server " + "status ", e); } } } } else { if (log.isDebugEnabled()) { log.debug("MQTT Transport is disabled as per configuration."); } } } private static class MBShutdownHandler implements ServerShutdownHandler, ServerRestartHandler { @Override public void invoke() { try { // executing pre-shutdown work for registered listeners before shutting down the andes server for (BrokerLifecycleListener listener : QpidServiceDataHolder.getInstance() .getBrokerLifecycleListeners()) { listener.onShuttingdown(); } AndesKernelBoot.shutDownAndesKernel(); // executing post-shutdown work for registered listeners after shutting down the andes server for (BrokerLifecycleListener listener : QpidServiceDataHolder.getInstance() .getBrokerLifecycleListeners()) { listener.onShutdown(); } } catch (AndesException e) { log.error("Error while shutting down Andes kernel. ", e); } } } }
/* Copyright 1995-2015 Esri Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. For additional information, contact: Environmental Systems Research Institute, Inc. Attn: Contracts Dept 380 New York Street Redlands, California, USA 92373 email: contracts@esri.com */ package com.esri.geoevent.test.performance.jaxb; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlElements; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlTransient; import org.apache.commons.lang3.ObjectUtils; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; @XmlRootElement(name = "Simulation") public class Simulation implements Applicable<Simulation> { private Test test; @XmlElements( { @XmlElement( name="RampTest", type = RampTest.class ), @XmlElement( name="StressTest", type = StressTest.class), @XmlElement( name="TimeTest", type = TimeTest.class ) } ) public Test getTest() { return test; } public void setTest(Test test) { this.test = test; } //--------------------------------------------------------------- // Helper Methods //--------------------------------------------------------------- @XmlTransient public int getEventsPerSec() { if( getTest() instanceof TimeTest ) { return ((TimeTest)getTest()).getEventsPerSec(); } return -1; } @XmlTransient public int getIterations() { switch( getTest().getType() ) { case STRESS: return ((StressTest)getTest()).getIterations(); case TIME: case RAMP: default: return 1; } } 
@XmlTransient public int getMinEvents() { switch( getTest().getType() ) { case RAMP: return ((RampTest)getTest()).getMinEvents(); case TIME: return ((TimeTest)getTest()).getEventsPerSec() * ((TimeTest)getTest()).getTotalTimeInSec(); case STRESS: return ((StressTest)getTest()).getNumOfEvents(); default: return 1; } } @XmlTransient public int getMaxEvents() { switch( getTest().getType() ) { case RAMP: return ((RampTest)getTest()).getMaxEvents(); case TIME: return ((TimeTest)getTest()).getEventsPerSec() * ((TimeTest)getTest()).getTotalTimeInSec(); case STRESS: return ((StressTest)getTest()).getNumOfEvents(); default: return 1; } } @XmlTransient public int getExpectedResultCount() { switch( getTest().getType() ) { case RAMP: return ((RampTest)getTest()).getExpectedResultCountPerTest(); case TIME: TimeTest timeTest = (TimeTest)getTest(); if( timeTest.getExpectedResultCountPerSec() == -1 ) return timeTest.getEventsPerSec() * timeTest.getTotalTimeInSec(); else return timeTest.getExpectedResultCountPerSec() * timeTest.getTotalTimeInSec(); case STRESS: return ((StressTest)getTest()).getExpectedResultCount(); default: return 1; } } @XmlTransient public int getEventsToAddPerIteration() { switch( getTest().getType() ) { case RAMP: return ((RampTest)getTest()).getEventsToAddPerTest(); case TIME: return ((TimeTest)getTest()).getEventsPerSec(); case STRESS: return 0; default: return 1; } } @XmlTransient public int getStaggeringInterval() { if( getTest() instanceof TimeTest ) { return ((TimeTest)getTest()).getStaggeringInterval(); } return 1; } @Override public void apply(Simulation simulation) { if( simulation == null ) return; // apply the test if( simulation.getTest() != null ) { if( getTest() != null ) { getTest().apply( simulation.getTest() ); } } } public Simulation copy() { Simulation copy = new Simulation(); if( getTest() != null ) copy.setTest(getTest().copy()); return copy; } @Override public boolean equals(Object obj) { if (obj == null || !(obj instanceof Simulation)) 
return false; Simulation simulation = (Simulation) obj; if (!ObjectUtils.equals(getTest(), simulation.getTest())) return false; return true; } @Override public String toString() { return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE); } }
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/texttospeech/v1beta1/cloud_tts.proto
// NOTE(review): generated file — change cloud_tts.proto and regenerate instead of hand-editing.

package com.google.cloud.texttospeech.v1beta1;

/**
 *
 *
 * <pre>
 * This contains a mapping between a certain point in the input text and a
 * corresponding time in the output audio.
 * </pre>
 *
 * Protobuf type {@code google.cloud.texttospeech.v1beta1.Timepoint}
 */
public final class Timepoint extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.texttospeech.v1beta1.Timepoint)
    TimepointOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use Timepoint.newBuilder() to construct.
  private Timepoint(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private Timepoint() {
    markName_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new Timepoint();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor used by PARSER.
  // tag 25 = field 3 (time_seconds, fixed64/double), tag 34 = field 4 (mark_name, length-delimited).
  private Timepoint(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 25:
            {
              timeSeconds_ = input.readDouble();
              break;
            }
          case 34:
            {
              java.lang.String s = input.readStringRequireUtf8();
              markName_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto
        .internal_static_google_cloud_texttospeech_v1beta1_Timepoint_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto
        .internal_static_google_cloud_texttospeech_v1beta1_Timepoint_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
com.google.cloud.texttospeech.v1beta1.Timepoint.class,
            com.google.cloud.texttospeech.v1beta1.Timepoint.Builder.class);
  }

  public static final int MARK_NAME_FIELD_NUMBER = 4;
  // Holds either a String or a ByteString; lazily converted on first access (standard protobuf pattern).
  private volatile java.lang.Object markName_;
  /**
   *
   *
   * <pre>
   * Timepoint name as received from the client within `&lt;mark&gt;` tag.
   * </pre>
   *
   * <code>string mark_name = 4;</code>
   *
   * @return The markName.
   */
  @java.lang.Override
  public java.lang.String getMarkName() {
    java.lang.Object ref = markName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      markName_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Timepoint name as received from the client within `&lt;mark&gt;` tag.
   * </pre>
   *
   * <code>string mark_name = 4;</code>
   *
   * @return The bytes for markName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getMarkNameBytes() {
    java.lang.Object ref = markName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      markName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int TIME_SECONDS_FIELD_NUMBER = 3;
  private double timeSeconds_;
  /**
   *
   *
   * <pre>
   * Time offset in seconds from the start of the synthesized audio.
   * </pre>
   *
   * <code>double time_seconds = 3;</code>
   *
   * @return The timeSeconds.
   */
  @java.lang.Override
  public double getTimeSeconds() {
    return timeSeconds_;
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields with their default values (0D / empty string) are omitted from the wire, per proto3.
    if (timeSeconds_ != 0D) {
      output.writeDouble(3, timeSeconds_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(markName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, markName_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (timeSeconds_ != 0D) {
      size += com.google.protobuf.CodedOutputStream.computeDoubleSize(3, timeSeconds_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(markName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, markName_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.texttospeech.v1beta1.Timepoint)) {
      return super.equals(obj);
    }
    com.google.cloud.texttospeech.v1beta1.Timepoint other =
        (com.google.cloud.texttospeech.v1beta1.Timepoint) obj;

    if (!getMarkName().equals(other.getMarkName())) return false;
    if (java.lang.Double.doubleToLongBits(getTimeSeconds())
        != java.lang.Double.doubleToLongBits(other.getTimeSeconds())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + MARK_NAME_FIELD_NUMBER;
    hash = (53 * hash) +
getMarkName().hashCode();
    hash = (37 * hash) + TIME_SECONDS_FIELD_NUMBER;
    hash =
        (53 * hash)
            + com.google.protobuf.Internal.hashLong(
                java.lang.Double.doubleToLongBits(getTimeSeconds()));
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parseFrom overloads; all delegate to PARSER.
  public static com.google.cloud.texttospeech.v1beta1.Timepoint parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.texttospeech.v1beta1.Timepoint parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.texttospeech.v1beta1.Timepoint parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.texttospeech.v1beta1.Timepoint parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.texttospeech.v1beta1.Timepoint parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.texttospeech.v1beta1.Timepoint parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.texttospeech.v1beta1.Timepoint parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.texttospeech.v1beta1.Timepoint parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.texttospeech.v1beta1.Timepoint parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.texttospeech.v1beta1.Timepoint parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.texttospeech.v1beta1.Timepoint parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.texttospeech.v1beta1.Timepoint parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.texttospeech.v1beta1.Timepoint prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ?
new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * This contains a mapping between a certain point in the input text and a
   * corresponding time in the output audio.
   * </pre>
   *
   * Protobuf type {@code google.cloud.texttospeech.v1beta1.Timepoint}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.texttospeech.v1beta1.Timepoint)
      com.google.cloud.texttospeech.v1beta1.TimepointOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto
          .internal_static_google_cloud_texttospeech_v1beta1_Timepoint_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto
          .internal_static_google_cloud_texttospeech_v1beta1_Timepoint_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.texttospeech.v1beta1.Timepoint.class,
              com.google.cloud.texttospeech.v1beta1.Timepoint.Builder.class);
    }

    // Construct using com.google.cloud.texttospeech.v1beta1.Timepoint.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      markName_ = "";

      timeSeconds_ = 0D;

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto
          .internal_static_google_cloud_texttospeech_v1beta1_Timepoint_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.texttospeech.v1beta1.Timepoint getDefaultInstanceForType() {
      return com.google.cloud.texttospeech.v1beta1.Timepoint.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.texttospeech.v1beta1.Timepoint build() {
      com.google.cloud.texttospeech.v1beta1.Timepoint result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.texttospeech.v1beta1.Timepoint buildPartial() {
      com.google.cloud.texttospeech.v1beta1.Timepoint result =
          new com.google.cloud.texttospeech.v1beta1.Timepoint(this);
      result.markName_ = markName_;
      result.timeSeconds_ = timeSeconds_;
      onBuilt();
      return result;
    }

    // Standard generated delegations to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.texttospeech.v1beta1.Timepoint) {
        return mergeFrom((com.google.cloud.texttospeech.v1beta1.Timepoint) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder
mergeFrom(com.google.cloud.texttospeech.v1beta1.Timepoint other) {
      if (other == com.google.cloud.texttospeech.v1beta1.Timepoint.getDefaultInstance())
        return this;
      if (!other.getMarkName().isEmpty()) {
        markName_ = other.markName_;
        onChanged();
      }
      if (other.getTimeSeconds() != 0D) {
        setTimeSeconds(other.getTimeSeconds());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.texttospeech.v1beta1.Timepoint parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.cloud.texttospeech.v1beta1.Timepoint) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object markName_ = "";
    /**
     *
     *
     * <pre>
     * Timepoint name as received from the client within `&lt;mark&gt;` tag.
     * </pre>
     *
     * <code>string mark_name = 4;</code>
     *
     * @return The markName.
     */
    public java.lang.String getMarkName() {
      java.lang.Object ref = markName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        markName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Timepoint name as received from the client within `&lt;mark&gt;` tag.
     * </pre>
     *
     * <code>string mark_name = 4;</code>
     *
     * @return The bytes for markName.
     */
    public com.google.protobuf.ByteString getMarkNameBytes() {
      java.lang.Object ref = markName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        markName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Timepoint name as received from the client within `&lt;mark&gt;` tag.
     * </pre>
     *
     * <code>string mark_name = 4;</code>
     *
     * @param value The markName to set.
     * @return This builder for chaining.
     */
    public Builder setMarkName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      markName_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Timepoint name as received from the client within `&lt;mark&gt;` tag.
     * </pre>
     *
     * <code>string mark_name = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMarkName() {

      markName_ = getDefaultInstance().getMarkName();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Timepoint name as received from the client within `&lt;mark&gt;` tag.
     * </pre>
     *
     * <code>string mark_name = 4;</code>
     *
     * @param value The bytes for markName to set.
     * @return This builder for chaining.
     */
    public Builder setMarkNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      markName_ = value;
      onChanged();
      return this;
    }

    private double timeSeconds_;
    /**
     *
     *
     * <pre>
     * Time offset in seconds from the start of the synthesized audio.
     * </pre>
     *
     * <code>double time_seconds = 3;</code>
     *
     * @return The timeSeconds.
     */
    @java.lang.Override
    public double getTimeSeconds() {
      return timeSeconds_;
    }
    /**
     *
     *
     * <pre>
     * Time offset in seconds from the start of the synthesized audio.
     * </pre>
     *
     * <code>double time_seconds = 3;</code>
     *
     * @param value The timeSeconds to set.
     * @return This builder for chaining.
     */
    public Builder setTimeSeconds(double value) {

      timeSeconds_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Time offset in seconds from the start of the synthesized audio.
     * </pre>
     *
     * <code>double time_seconds = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearTimeSeconds() {

      timeSeconds_ = 0D;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.texttospeech.v1beta1.Timepoint)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.texttospeech.v1beta1.Timepoint)
  private static final com.google.cloud.texttospeech.v1beta1.Timepoint DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.texttospeech.v1beta1.Timepoint();
  }

  public static com.google.cloud.texttospeech.v1beta1.Timepoint getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<Timepoint> PARSER =
      new com.google.protobuf.AbstractParser<Timepoint>() {
        @java.lang.Override
        public Timepoint parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new Timepoint(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<Timepoint> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<Timepoint> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.texttospeech.v1beta1.Timepoint getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/* * Copyright 2013 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.handler.codec.http; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.embedded.EmbeddedChannel; import io.netty.handler.codec.PrematureChannelClosureException; import io.netty.handler.codec.TooLongFrameException; import io.netty.handler.codec.http.HttpHeaders.Names; import io.netty.util.CharsetUtil; import org.junit.Test; import java.util.Arrays; import java.util.List; import static org.hamcrest.CoreMatchers.*; import static org.junit.Assert.*; public class HttpResponseDecoderTest { /** * The size of headers should be calculated correctly even if a single header is split into multiple fragments. 
 * @see <a href="https://github.com/netty/netty/issues/3445">#3445</a>
 */
@Test
public void testMaxHeaderSize1() {
    final int maxHeaderSize = 8192;
    final EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder(4096, maxHeaderSize, 8192));
    final char[] bytes = new char[maxHeaderSize / 2 - 2];
    Arrays.fill(bytes, 'a');

    ch.writeInbound(Unpooled.copiedBuffer("HTTP/1.1 200 OK\r\n", CharsetUtil.US_ASCII));

    // Write two 4096-byte headers (= 8192 bytes)
    ch.writeInbound(Unpooled.copiedBuffer("A:", CharsetUtil.US_ASCII));
    ch.writeInbound(Unpooled.copiedBuffer(bytes, CharsetUtil.US_ASCII));
    ch.writeInbound(Unpooled.copiedBuffer("\r\n", CharsetUtil.US_ASCII));
    assertNull(ch.readInbound());
    ch.writeInbound(Unpooled.copiedBuffer("B:", CharsetUtil.US_ASCII));
    ch.writeInbound(Unpooled.copiedBuffer(bytes, CharsetUtil.US_ASCII));
    ch.writeInbound(Unpooled.copiedBuffer("\r\n", CharsetUtil.US_ASCII));
    ch.writeInbound(Unpooled.copiedBuffer("\r\n", CharsetUtil.US_ASCII));

    // Exactly at the limit: the response must decode successfully.
    HttpResponse res = (HttpResponse) ch.readInbound();
    assertNull(res.getDecoderResult().cause());
    assertTrue(res.getDecoderResult().isSuccess());
    assertNull(ch.readInbound());
    assertTrue(ch.finish());
    assertThat(ch.readInbound(), instanceOf(LastHttpContent.class));
}

/**
 * Complementary test case of {@link #testMaxHeaderSize1()}. When it actually exceeds the maximum, it should fail.
 */
@Test
public void testMaxHeaderSize2() {
    final int maxHeaderSize = 8192;
    final EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder(4096, maxHeaderSize, 8192));
    final char[] bytes = new char[maxHeaderSize / 2 - 2];
    Arrays.fill(bytes, 'a');

    ch.writeInbound(Unpooled.copiedBuffer("HTTP/1.1 200 OK\r\n", CharsetUtil.US_ASCII));

    // Write a 4096-byte header and a 4097-byte header to test an off-by-one case (= 8193 bytes)
    ch.writeInbound(Unpooled.copiedBuffer("A:", CharsetUtil.US_ASCII));
    ch.writeInbound(Unpooled.copiedBuffer(bytes, CharsetUtil.US_ASCII));
    ch.writeInbound(Unpooled.copiedBuffer("\r\n", CharsetUtil.US_ASCII));
    assertNull(ch.readInbound());
    ch.writeInbound(Unpooled.copiedBuffer("B: ", CharsetUtil.US_ASCII)); // Note an extra space.
    ch.writeInbound(Unpooled.copiedBuffer(bytes, CharsetUtil.US_ASCII));
    ch.writeInbound(Unpooled.copiedBuffer("\r\n", CharsetUtil.US_ASCII));
    ch.writeInbound(Unpooled.copiedBuffer("\r\n", CharsetUtil.US_ASCII));

    // One byte over the limit: decoding must fail with TooLongFrameException.
    HttpResponse res = (HttpResponse) ch.readInbound();
    assertTrue(res.getDecoderResult().cause() instanceof TooLongFrameException);
    assertFalse(ch.finish());
    assertNull(ch.readInbound());
}

// Decodes ten 64-byte chunks followed by the zero-length terminating chunk.
@Test
public void testResponseChunked() {
    EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
    ch.writeInbound(Unpooled.copiedBuffer("HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n",
            CharsetUtil.US_ASCII));

    HttpResponse res = (HttpResponse) ch.readInbound();
    assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
    assertThat(res.getStatus(), is(HttpResponseStatus.OK));

    byte[] data = new byte[64];
    for (int i = 0; i < data.length; i++) {
        data[i] = (byte) i;
    }

    for (int i = 0; i < 10; i++) {
        // The chunk-size line alone produces no message; the payload does.
        assertFalse(ch.writeInbound(Unpooled.copiedBuffer(Integer.toHexString(data.length) + "\r\n",
                CharsetUtil.US_ASCII)));
        assertTrue(ch.writeInbound(Unpooled.wrappedBuffer(data)));

        HttpContent content = (HttpContent) ch.readInbound();
        assertEquals(data.length, content.content().readableBytes());

        byte[] decodedData = new byte[data.length];
        content.content().readBytes(decodedData);
        assertArrayEquals(data, decodedData);
        content.release();

        // The trailing CRLF of the chunk produces nothing by itself.
        assertFalse(ch.writeInbound(Unpooled.copiedBuffer("\r\n", CharsetUtil.US_ASCII)));
    }

    // Write the last chunk.
    ch.writeInbound(Unpooled.copiedBuffer("0\r\n\r\n", CharsetUtil.US_ASCII));

    // Ensure the last chunk was decoded.
    LastHttpContent content = (LastHttpContent) ch.readInbound();
    assertFalse(content.content().isReadable());
    content.release();

    ch.finish();
    assertNull(ch.readInbound());
}

// With maxChunkSize = 32, each 64-byte chunk must be split into two 32-byte HttpContent messages.
@Test
public void testResponseChunkedExceedMaxChunkSize() {
    EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder(4096, 8192, 32));
    ch.writeInbound(
            Unpooled.copiedBuffer("HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n", CharsetUtil.US_ASCII));

    HttpResponse res = (HttpResponse) ch.readInbound();
    assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
    assertThat(res.getStatus(), is(HttpResponseStatus.OK));

    byte[] data = new byte[64];
    for (int i = 0; i < data.length; i++) {
        data[i] = (byte) i;
    }

    for (int i = 0; i < 10; i++) {
        assertFalse(ch.writeInbound(Unpooled.copiedBuffer(Integer.toHexString(data.length) + "\r\n",
                CharsetUtil.US_ASCII)));
        assertTrue(ch.writeInbound(Unpooled.wrappedBuffer(data)));

        byte[] decodedData = new byte[data.length];
        // First half of the chunk.
        HttpContent content = (HttpContent) ch.readInbound();
        assertEquals(32, content.content().readableBytes());
        content.content().readBytes(decodedData, 0, 32);
        content.release();

        // Second half of the chunk; together they must reassemble the original data.
        content = (HttpContent) ch.readInbound();
        assertEquals(32, content.content().readableBytes());
        content.content().readBytes(decodedData, 32, 32);
        assertArrayEquals(data, decodedData);
        content.release();

        assertFalse(ch.writeInbound(Unpooled.copiedBuffer("\r\n", CharsetUtil.US_ASCII)));
    }

    // Write the last chunk.
    ch.writeInbound(Unpooled.copiedBuffer("0\r\n\r\n", CharsetUtil.US_ASCII));

    // Ensure the last chunk was decoded.
    LastHttpContent content = (LastHttpContent) ch.readInbound();
    assertFalse(content.content().isReadable());
    content.release();

    ch.finish();
    assertNull(ch.readInbound());
}

// No Content-Length and no Transfer-Encoding: closing the connection terminates the message.
@Test
public void testClosureWithoutContentLength1() throws Exception {
    EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
    ch.writeInbound(Unpooled.copiedBuffer("HTTP/1.1 200 OK\r\n\r\n", CharsetUtil.US_ASCII));

    // Read the response headers.
    HttpResponse res = (HttpResponse) ch.readInbound();
    assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
    assertThat(res.getStatus(), is(HttpResponseStatus.OK));
    assertThat(ch.readInbound(), is(nullValue()));

    // Close the connection without sending anything.
    assertTrue(ch.finish());

    // The decoder should still produce the last content.
    LastHttpContent content = (LastHttpContent) ch.readInbound();
    assertThat(content.content().isReadable(), is(false));
    content.release();

    // But nothing more.
    assertThat(ch.readInbound(), is(nullValue()));
}

// Same as above but with some body bytes already received before the close.
@Test
public void testClosureWithoutContentLength2() throws Exception {
    EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());

    // Write the partial response.
    ch.writeInbound(Unpooled.copiedBuffer("HTTP/1.1 200 OK\r\n\r\n12345678", CharsetUtil.US_ASCII));

    // Read the response headers.
    HttpResponse res = (HttpResponse) ch.readInbound();
    assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
    assertThat(res.getStatus(), is(HttpResponseStatus.OK));

    // Read the partial content.
    HttpContent content = (HttpContent) ch.readInbound();
    assertThat(content.content().toString(CharsetUtil.US_ASCII), is("12345678"));
    assertThat(content, is(not(instanceOf(LastHttpContent.class))));
    content.release();

    assertThat(ch.readInbound(), is(nullValue()));

    // Close the connection.
    assertTrue(ch.finish());

    // The decoder should still produce the last content.
    LastHttpContent lastContent = (LastHttpContent) ch.readInbound();
    assertThat(lastContent.content().isReadable(), is(false));
    lastContent.release();

    // But nothing more.
    assertThat(ch.readInbound(), is(nullValue()));
}

// A chunked response closed before the terminating chunk must NOT produce a LastHttpContent.
@Test
public void testPrematureClosureWithChunkedEncoding1() throws Exception {
    EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
    ch.writeInbound(
            Unpooled.copiedBuffer("HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n", CharsetUtil.US_ASCII));

    // Read the response headers.
    HttpResponse res = (HttpResponse) ch.readInbound();
    assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
    assertThat(res.getStatus(), is(HttpResponseStatus.OK));
    assertThat(res.headers().get(Names.TRANSFER_ENCODING), is("chunked"));
    assertThat(ch.readInbound(), is(nullValue()));

    // Close the connection without sending anything.
    ch.finish();

    // The decoder should not generate the last chunk because it's closed prematurely.
    assertThat(ch.readInbound(), is(nullValue()));
}

// Premature close in the middle of a chunk: partial content is delivered, but no last chunk.
@Test
public void testPrematureClosureWithChunkedEncoding2() throws Exception {
    EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());

    // Write the partial response.
    ch.writeInbound(Unpooled.copiedBuffer(
            "HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n8\r\n12345678", CharsetUtil.US_ASCII));

    // Read the response headers.
    HttpResponse res = (HttpResponse) ch.readInbound();
    assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
    assertThat(res.getStatus(), is(HttpResponseStatus.OK));
    assertThat(res.headers().get(Names.TRANSFER_ENCODING), is("chunked"));

    // Read the partial content.
    HttpContent content = (HttpContent) ch.readInbound();
    assertThat(content.content().toString(CharsetUtil.US_ASCII), is("12345678"));
    assertThat(content, is(not(instanceOf(LastHttpContent.class))));
    content.release();

    assertThat(ch.readInbound(), is(nullValue()));

    // Close the connection.
    ch.finish();

    // The decoder should not generate the last chunk because it's closed prematurely.
    assertThat(ch.readInbound(), is(nullValue()));
}

@Test
public void testLastResponseWithEmptyHeaderAndEmptyContent() {
    EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
    ch.writeInbound(Unpooled.copiedBuffer("HTTP/1.1 200 OK\r\n\r\n", CharsetUtil.US_ASCII));

    HttpResponse res = (HttpResponse) ch.readInbound();
    assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
    assertThat(res.getStatus(), is(HttpResponseStatus.OK));
    assertThat(ch.readInbound(), is(nullValue()));

    assertThat(ch.finish(), is(true));

    LastHttpContent content = (LastHttpContent) ch.readInbound();
    assertThat(content.content().isReadable(), is(false));
    content.release();

    assertThat(ch.readInbound(), is(nullValue()));
}

@Test
public void testLastResponseWithoutContentLengthHeader() {
    EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
    ch.writeInbound(Unpooled.copiedBuffer("HTTP/1.1 200 OK\r\n\r\n", CharsetUtil.US_ASCII));

    HttpResponse res = (HttpResponse) ch.readInbound();
    assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
    assertThat(res.getStatus(), is(HttpResponseStatus.OK));
    assertThat(ch.readInbound(), is(nullValue()));

    // Without a Content-Length, whatever arrives is treated as body content.
    ch.writeInbound(Unpooled.wrappedBuffer(new byte[1024]));
    HttpContent content = (HttpContent) ch.readInbound();
    assertThat(content.content().readableBytes(), is(1024));
    content.release();

    assertThat(ch.finish(), is(true));

    LastHttpContent lastContent = (LastHttpContent) ch.readInbound();
    assertThat(lastContent.content().isReadable(), is(false));
    lastContent.release();

    assertThat(ch.readInbound(), is(nullValue()));
}

// Trailing headers after the terminating chunk must end up on the LastHttpContent.
@Test
public void testLastResponseWithTrailingHeader() {
    EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
    ch.writeInbound(Unpooled.copiedBuffer(
            "HTTP/1.1 200 OK\r\n" +
            "Transfer-Encoding: chunked\r\n" +
            "\r\n" +
            "0\r\n" +
            "Set-Cookie: t1=t1v1\r\n" +
            "Set-Cookie: t2=t2v2; Expires=Wed, 09-Jun-2021 10:18:14 GMT\r\n" +
            "\r\n",
            CharsetUtil.US_ASCII));

    HttpResponse res = (HttpResponse) ch.readInbound();
    assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
    assertThat(res.getStatus(), is(HttpResponseStatus.OK));

    LastHttpContent lastContent = (LastHttpContent) ch.readInbound();
    assertThat(lastContent.content().isReadable(), is(false));
    HttpHeaders headers = lastContent.trailingHeaders();
    assertEquals(1, headers.names().size());
    List<String> values = headers.getAll("Set-Cookie");
    assertEquals(2, values.size());
    assertTrue(values.contains("t1=t1v1"));
    assertTrue(values.contains("t2=t2v2; Expires=Wed, 09-Jun-2021 10:18:14 GMT"));
    lastContent.release();

    assertThat(ch.finish(), is(false));
    assertThat(ch.readInbound(), is(nullValue()));
}

// Feeds the same trailing-header response in every possible fragment size.
@Test
public void testLastResponseWithTrailingHeaderFragmented() {
    byte[] data = ("HTTP/1.1 200 OK\r\n" +
            "Transfer-Encoding: chunked\r\n" +
            "\r\n" +
            "0\r\n" +
            "Set-Cookie: t1=t1v1\r\n" +
            "Set-Cookie: t2=t2v2; Expires=Wed, 09-Jun-2021 10:18:14 GMT\r\n" +
            "\r\n").getBytes(CharsetUtil.US_ASCII);
    for (int i = 1; i < data.length; i++) {
        testLastResponseWithTrailingHeaderFragmented(data, i);
    }
}

// Helper: writes the header in fragmentSize slices, then the remainder in one write,
// and verifies the decoded response and trailing headers.
private static void testLastResponseWithTrailingHeaderFragmented(byte[] content, int fragmentSize) {
    EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
    int headerLength = 47; // Byte length of the status line + headers + empty line.
    // split up the header
    for (int a = 0; a < headerLength;) {
        int amount = fragmentSize;
        if (a + amount > headerLength) {
            amount = headerLength - a;
        }

        // if header is done it should produce a HttpRequest
        boolean headerDone = a + amount == headerLength;
        assertEquals(headerDone, ch.writeInbound(Unpooled.wrappedBuffer(content, a, amount)));
        a += amount;
    }

    ch.writeInbound(Unpooled.wrappedBuffer(content, headerLength, content.length - headerLength));

    HttpResponse res = (HttpResponse) ch.readInbound();
    assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
    assertThat(res.getStatus(), is(HttpResponseStatus.OK));

    LastHttpContent lastContent = (LastHttpContent) ch.readInbound();
    assertThat(lastContent.content().isReadable(), is(false));
    HttpHeaders headers = lastContent.trailingHeaders();
    assertEquals(1, headers.names().size());
    List<String> values = headers.getAll("Set-Cookie");
    assertEquals(2, values.size());
    assertTrue(values.contains("t1=t1v1"));
    assertTrue(values.contains("t2=t2v2; Expires=Wed, 09-Jun-2021 10:18:14 GMT"));
    lastContent.release();

    assertThat(ch.finish(), is(false));
    assertThat(ch.readInbound(), is(nullValue()));
}

// A fixed-length body delivered in two halves must decode into two content messages.
@Test
public void testResponseWithContentLength() {
    EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
    ch.writeInbound(Unpooled.copiedBuffer(
            "HTTP/1.1 200 OK\r\n" +
            "Content-Length: 10\r\n" +
            "\r\n", CharsetUtil.US_ASCII));

    byte[] data = new byte[10];
    for (int i = 0; i < data.length; i++) {
        data[i] = (byte) i;
    }
    ch.writeInbound(Unpooled.wrappedBuffer(data, 0, data.length / 2));
    ch.writeInbound(Unpooled.wrappedBuffer(data, 5, data.length / 2));

    HttpResponse res = (HttpResponse) ch.readInbound();
    assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
    assertThat(res.getStatus(), is(HttpResponseStatus.OK));

    HttpContent firstContent = (HttpContent) ch.readInbound();
    assertThat(firstContent.content().readableBytes(), is(5));
    assertEquals(Unpooled.wrappedBuffer(data, 0, 5), firstContent.content());
    firstContent.release();

    LastHttpContent lastContent = (LastHttpContent) ch.readInbound();
    assertEquals(5, lastContent.content().readableBytes());
    assertEquals(Unpooled.wrappedBuffer(data, 5, 5), lastContent.content());
    lastContent.release();

    assertThat(ch.finish(), is(false));
    assertThat(ch.readInbound(), is(nullValue()));
}

// Feeds the Content-Length header block in every possible fragment size.
@Test
public void testResponseWithContentLengthFragmented() {
    byte[] data = ("HTTP/1.1 200 OK\r\n" +
            "Content-Length: 10\r\n" +
            "\r\n").getBytes(CharsetUtil.US_ASCII);
    for (int i = 1; i < data.length; i++) {
        testResponseWithContentLengthFragmented(data, i);
    }
}

// Helper: writes the header in fragmentSize slices, then a 10-byte body in two halves.
private static void testResponseWithContentLengthFragmented(byte[] header, int fragmentSize) {
    EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
    // split up the header
    for (int a = 0; a < header.length;) {
        int amount = fragmentSize;
        if (a + amount > header.length) {
            amount = header.length - a;
        }
        ch.writeInbound(Unpooled.wrappedBuffer(header, a, amount));
        a += amount;
    }
    byte[] data = new byte[10];
    for (int i = 0; i < data.length; i++) {
        data[i] = (byte) i;
    }
    ch.writeInbound(Unpooled.wrappedBuffer(data, 0, data.length / 2));
    ch.writeInbound(Unpooled.wrappedBuffer(data, 5, data.length / 2));

    HttpResponse res = (HttpResponse) ch.readInbound();
    assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
    assertThat(res.getStatus(), is(HttpResponseStatus.OK));

    HttpContent firstContent = (HttpContent) ch.readInbound();
    assertThat(firstContent.content().readableBytes(), is(5));
    assertEquals(Unpooled.wrappedBuffer(data, 0, 5), firstContent.content());
    firstContent.release();

    LastHttpContent lastContent = (LastHttpContent) ch.readInbound();
    assertEquals(5, lastContent.content().readableBytes());
    assertEquals(Unpooled.wrappedBuffer(data, 5, 5), lastContent.content());
    lastContent.release();

    assertThat(ch.finish(), is(false));
    assertThat(ch.readInbound(), is(nullValue()));
}

// After a 101 Switching Protocols handshake, bytes in the same buffer are still decoded as content.
@Test
public void testWebSocketResponse() {
    byte[] data = ("HTTP/1.1 101 WebSocket Protocol Handshake\r\n" +
            "Upgrade: WebSocket\r\n" +
            "Connection: Upgrade\r\n" +
            "Sec-WebSocket-Origin: http://localhost:8080\r\n" +
            "Sec-WebSocket-Location: ws://localhost/some/path\r\n" +
            "\r\n" +
            "1234567812345678").getBytes();
    EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
    ch.writeInbound(Unpooled.wrappedBuffer(data));

    HttpResponse res = (HttpResponse) ch.readInbound();
    assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
    assertThat(res.getStatus(), is(HttpResponseStatus.SWITCHING_PROTOCOLS));
    HttpContent content = (HttpContent) ch.readInbound();
    assertThat(content.content().readableBytes(), is(16));
    content.release();

    assertThat(ch.finish(), is(false));

    assertThat(ch.readInbound(), is(nullValue()));
}

// See https://github.com/netty/netty/issues/2173
@Test
public void testWebSocketResponseWithDataFollowing() {
    byte[] data = ("HTTP/1.1 101 WebSocket Protocol Handshake\r\n" +
            "Upgrade: WebSocket\r\n" +
            "Connection: Upgrade\r\n" +
            "Sec-WebSocket-Origin: http://localhost:8080\r\n" +
            "Sec-WebSocket-Location: ws://localhost/some/path\r\n" +
            "\r\n" +
            "1234567812345678").getBytes();
    byte[] otherData = {1, 2, 3, 4};

    EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
    ch.writeInbound(Unpooled.wrappedBuffer(data, otherData));

    HttpResponse res = (HttpResponse) ch.readInbound();
    assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
    assertThat(res.getStatus(), is(HttpResponseStatus.SWITCHING_PROTOCOLS));
    HttpContent content = (HttpContent) ch.readInbound();
    assertThat(content.content().readableBytes(), is(16));
    content.release();

    assertThat(ch.finish(), is(true));

    // The bytes after the websocket payload must be passed through as a raw ByteBuf.
    ByteBuf expected = Unpooled.wrappedBuffer(otherData);
    ByteBuf buffer = (ByteBuf) ch.readInbound();
    try {
        assertEquals(expected, buffer);
    } finally {
        expected.release();
        if (buffer != null) {
            buffer.release();
        }
    }
}

@Test
public void testGarbageHeaders() {
    // A response without headers - from https://github.com/netty/netty/issues/2103
    byte[] data = ("<html>\r\n" +
            "<head><title>400 Bad Request</title></head>\r\n" +
            "<body bgcolor=\"white\">\r\n" +
            "<center><h1>400 Bad Request</h1></center>\r\n" +
            "<hr><center>nginx/1.1.19</center>\r\n" +
            "</body>\r\n" +
            "</html>\r\n").getBytes();

    EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());

    ch.writeInbound(Unpooled.wrappedBuffer(data));

    // Garbage input should generate the 999 Unknown response.
    HttpResponse res = (HttpResponse) ch.readInbound();
    assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_0));
    assertThat(res.getStatus().code(), is(999));
    assertThat(res.getDecoderResult().isFailure(), is(true));
    assertThat(res.getDecoderResult().isFinished(), is(true));
    assertThat(ch.readInbound(), is(nullValue()));

    // More garbage should not generate anything (i.e. the decoder discards anything beyond this point.)
    ch.writeInbound(Unpooled.wrappedBuffer(data));
    assertThat(ch.readInbound(), is(nullValue()));

    // Closing the connection should not generate anything since the protocol has been violated.
    ch.finish();
    assertThat(ch.readInbound(), is(nullValue()));
}

/**
 * Tests if the decoder produces one and only {@link LastHttpContent} when an invalid chunk is received and
 * the connection is closed.
 */
@Test
public void testGarbageChunk() {
    EmbeddedChannel channel = new EmbeddedChannel(new HttpResponseDecoder());
    String responseWithIllegalChunk =
            "HTTP/1.1 200 OK\r\n" +
            "Transfer-Encoding: chunked\r\n\r\n" +
            "NOT_A_CHUNK_LENGTH\r\n";

    channel.writeInbound(Unpooled.copiedBuffer(responseWithIllegalChunk, CharsetUtil.US_ASCII));
    assertThat(channel.readInbound(), is(instanceOf(HttpResponse.class)));

    // Ensure that the decoder generates the last chunk with correct decoder result.
    LastHttpContent invalidChunk = (LastHttpContent) channel.readInbound();
    assertThat(invalidChunk.getDecoderResult().isFailure(), is(true));
    invalidChunk.release();

    // And no more messages should be produced by the decoder.
    assertThat(channel.readInbound(), is(nullValue()));

    // .. even after the connection is closed.
    assertThat(channel.finish(), is(false));
}

// Closing after only the status line arrives must yield a response that failed with
// PrematureChannelClosureException.
@Test
public void testConnectionClosedBeforeHeadersReceived() {
    EmbeddedChannel channel = new EmbeddedChannel(new HttpResponseDecoder());
    String responseInitialLine =
            "HTTP/1.1 200 OK\r\n";
    assertFalse(channel.writeInbound(Unpooled.copiedBuffer(responseInitialLine, CharsetUtil.US_ASCII)));
    assertTrue(channel.finish());
    HttpMessage message = (HttpMessage) channel.readInbound();
    assertTrue(message.getDecoderResult().isFailure());
    assertThat(message.getDecoderResult().cause(), instanceOf(PrematureChannelClosureException.class));
    assertNull(channel.readInbound());
}
}
// PART OF THE MACHINE SIMULATION. DO NOT CHANGE.

package nachos.machine;

import nachos.security.*;
import nachos.ag.*;

import java.io.File;

/**
 * The master class of the simulated machine. Processes command line arguments,
 * constructs all simulated hardware devices, and starts the grader.
 */
public final class Machine {
    /**
     * Nachos main entry point. Boot order: parse arguments, load the config
     * file, set up directories and the security sandbox, create the simulated
     * hardware devices, verify the user-supplied classes, then hand control to
     * the autograder on a fresh TCB.
     *
     * @param args the command line arguments.
     */
    public static void main(final String[] args) {
        System.out.print("nachos 5.0j initializing...");

        // main() must only ever run once per JVM.
        Lib.assertTrue(Machine.args == null);
        Machine.args = args;

        processArgs();

        Config.load(configFileName);

        // get the current directory (.)
        baseDirectory = new File(new File("").getAbsolutePath());
        // get the nachos directory (./nachos)
        nachosDirectory = new File(baseDirectory, "nachos");

        String testDirectoryName = Config.getString("FileSystem.testDirectory");

        // get the test directory
        if (testDirectoryName != null) {
            testDirectory = new File(testDirectoryName);
        } else {
            // use ../test
            testDirectory = new File(baseDirectory.getParentFile(), "test");
        }

        // Install the sandbox before any user code can run.
        securityManager = new NachosSecurityManager(testDirectory);
        privilege = securityManager.getPrivilege();

        privilege.machine = new MachinePrivilege();

        TCB.givePrivilege(privilege);
        privilege.stats = stats;

        securityManager.enable();

        createDevices();
        checkUserClasses();

        autoGrader = (AutoGrader) Lib.constructObject(autoGraderClassName);

        // The autograder runs on a Nachos TCB, not on the JVM main thread.
        new TCB().start(new Runnable() {
            public void run() {
                autoGrader.start(privilege);
            }
        });
    }

    /**
     * Yield to non-Nachos threads. Use in non-preemptive JVM's to give
     * non-Nachos threads a chance to run.
     */
    public static void yield() {
        Thread.yield();
    }

    /**
     * Terminate Nachos. Same as <tt>TCB.die()</tt>.
     */
    public static void terminate() {
        TCB.die();
    }

    /**
     * Terminate Nachos as the result of an unhandled exception or error.
     *
     * @param e the exception or error.
     */
    public static void terminate(Throwable e) {
        // ThreadDeath must propagate so the JVM can actually stop the thread.
        if (e instanceof ThreadDeath)
            throw (ThreadDeath) e;

        e.printStackTrace();
        terminate();
    }

    /**
     * Print stats, and terminate Nachos.
     */
    public static void halt() {
        System.out.print("Machine halting!\n\n");
        stats.print();
        terminate();
    }

    /**
     * Return an array containing all command line arguments.
     *
     * @return the command line arguments passed to Nachos.
     */
    public static String[] getCommandLineArguments() {
        // Defensive copy so callers cannot mutate the machine's argument list.
        String[] result = new String[args.length];

        System.arraycopy(args, 0, result, 0, args.length);

        return result;
    }

    // Parses the command-line switches documented in the help string and seeds
    // the random number generator.
    // NOTE(review): the "-#" switch listed in the help text is not handled
    // here — presumably it is consumed by the autograder; verify against
    // nachos.ag.AutoGrader.
    private static void processArgs() {
        for (int i = 0; i < args.length;) {
            String arg = args[i++];
            if (arg.length() > 0 && arg.charAt(0) == '-') {
                if (arg.equals("-d")) {
                    Lib.assertTrue(i < args.length, "switch without argument");
                    Lib.enableDebugFlags(args[i++]);
                } else if (arg.equals("-h")) {
                    System.out.print(help);
                    System.exit(1);
                } else if (arg.equals("-m")) {
                    Lib.assertTrue(i < args.length, "switch without argument");
                    try {
                        numPhysPages = Integer.parseInt(args[i++]);
                    } catch (NumberFormatException e) {
                        Lib.assertNotReached("bad value for -m switch");
                    }
                } else if (arg.equals("-s")) {
                    Lib.assertTrue(i < args.length, "switch without argument");
                    try {
                        randomSeed = Long.parseLong(args[i++]);
                    } catch (NumberFormatException e) {
                        Lib.assertNotReached("bad value for -s switch");
                    }
                } else if (arg.equals("-x")) {
                    Lib.assertTrue(i < args.length, "switch without argument");
                    shellProgramName = args[i++];
                } else if (arg.equals("-z")) {
                    System.out.print(copyright);
                    System.exit(1);
                }
                // these switches are reserved for the autograder
                else if (arg.equals("-[]")) {
                    Lib.assertTrue(i < args.length, "switch without argument");
                    configFileName = args[i++];
                } else if (arg.equals("--")) {
                    Lib.assertTrue(i < args.length, "switch without argument");
                    autoGraderClassName = args[i++];
                }
            }
        }

        Lib.seedRandom(randomSeed);
    }

    // Instantiates the simulated hardware; each optional device is gated on a
    // boolean in the loaded config file.
    private static void createDevices() {
        interrupt = new Interrupt(privilege);
        timer = new Timer(privilege);

        if (Config.getBoolean("Machine.bank"))
            bank = new ElevatorBank(privilege);

        if (Config.getBoolean("Machine.processor")) {
            // -m overrides the config; -1 means "not set on the command line".
            if (numPhysPages == -1)
                numPhysPages = Config.getInteger("Processor.numPhysPages");
            processor = new Processor(privilege, numPhysPages);
        }

        if (Config.getBoolean("Machine.console"))
            console = new StandardConsole(privilege);

        if (Config.getBoolean("Machine.stubFileSystem"))
            stubFileSystem = new StubFileSystem(privilege, testDirectory);

        if (Config.getBoolean("Machine.networkLink"))
            networkLink = new NetworkLink(privilege);
    }

    // Reflectively verifies that the student-supplied classes expose the
    // exact constructors, methods and fields the simulation will call.
    // Fails fast (via Lib.check*) before any kernel code runs.
    private static void checkUserClasses() {
        System.out.print(" user-check");

        Class aclsInt = (new int[0]).getClass();
        Class clsObject = Lib.loadClass("java.lang.Object");
        Class clsRunnable = Lib.loadClass("java.lang.Runnable");
        Class clsString = Lib.loadClass("java.lang.String");

        Class clsKernel = Lib.loadClass("nachos.machine.Kernel");
        Class clsFileSystem = Lib.loadClass("nachos.machine.FileSystem");
        Class clsRiderControls = Lib.loadClass("nachos.machine.RiderControls");
        Class clsElevatorControls = Lib
                .loadClass("nachos.machine.ElevatorControls");
        Class clsRiderInterface = Lib
                .loadClass("nachos.machine.RiderInterface");
        Class clsElevatorControllerInterface = Lib
                .loadClass("nachos.machine.ElevatorControllerInterface");

        Class clsAlarm = Lib.loadClass("nachos.threads.Alarm");
        Class clsThreadedKernel = Lib
                .loadClass("nachos.threads.ThreadedKernel");
        Class clsKThread = Lib.loadClass("nachos.threads.KThread");
        Class clsCommunicator = Lib.loadClass("nachos.threads.Communicator");
        Class clsSemaphore = Lib.loadClass("nachos.threads.Semaphore");
        Class clsLock = Lib.loadClass("nachos.threads.Lock");
        Class clsCondition = Lib.loadClass("nachos.threads.Condition");
        Class clsCondition2 = Lib.loadClass("nachos.threads.Condition2");
        Class clsRider = Lib.loadClass("nachos.threads.Rider");
        Class clsElevatorController = Lib
                .loadClass("nachos.threads.ElevatorController");

        Lib.checkDerivation(clsThreadedKernel, clsKernel);

        Lib.checkStaticField(clsThreadedKernel, "alarm", clsAlarm);
        Lib.checkStaticField(clsThreadedKernel, "fileSystem", clsFileSystem);

        Lib.checkMethod(clsAlarm, "waitUntil", new Class[] { long.class },
                void.class);

        Lib.checkConstructor(clsKThread, new Class[] {});
        Lib.checkConstructor(clsKThread, new Class[] { clsRunnable });

        Lib.checkStaticMethod(clsKThread, "currentThread", new Class[] {},
                clsKThread);
        Lib.checkStaticMethod(clsKThread, "finish", new Class[] {}, void.class);
        Lib.checkStaticMethod(clsKThread, "yield", new Class[] {}, void.class);
        Lib.checkStaticMethod(clsKThread, "sleep", new Class[] {}, void.class);

        Lib.checkMethod(clsKThread, "setTarget", new Class[] { clsRunnable },
                clsKThread);
        Lib.checkMethod(clsKThread, "setName", new Class[] { clsString },
                clsKThread);
        Lib.checkMethod(clsKThread, "getName", new Class[] {}, clsString);
        Lib.checkMethod(clsKThread, "fork", new Class[] {}, void.class);
        Lib.checkMethod(clsKThread, "ready", new Class[] {}, void.class);
        Lib.checkMethod(clsKThread, "join", new Class[] {}, void.class);

        Lib.checkField(clsKThread, "schedulingState", clsObject);

        Lib.checkConstructor(clsCommunicator, new Class[] {});
        Lib.checkMethod(clsCommunicator, "speak", new Class[] { int.class },
                void.class);
        Lib.checkMethod(clsCommunicator, "listen", new Class[] {}, int.class);

        Lib.checkConstructor(clsSemaphore, new Class[] { int.class });
        Lib.checkMethod(clsSemaphore, "P", new Class[] {}, void.class);
        Lib.checkMethod(clsSemaphore, "V", new Class[] {}, void.class);

        Lib.checkConstructor(clsLock, new Class[] {});
        Lib.checkMethod(clsLock, "acquire", new Class[] {}, void.class);
        Lib.checkMethod(clsLock, "release", new Class[] {}, void.class);
        Lib.checkMethod(clsLock, "isHeldByCurrentThread", new Class[] {},
                boolean.class);

        Lib.checkConstructor(clsCondition, new Class[] { clsLock });
        Lib.checkConstructor(clsCondition2, new Class[] { clsLock });

        Lib.checkMethod(clsCondition, "sleep", new Class[] {}, void.class);
        Lib.checkMethod(clsCondition, "wake", new Class[] {}, void.class);
        Lib.checkMethod(clsCondition, "wakeAll", new Class[] {}, void.class);
        Lib.checkMethod(clsCondition2, "sleep", new Class[] {}, void.class);
        Lib.checkMethod(clsCondition2, "wake", new Class[] {}, void.class);
        Lib.checkMethod(clsCondition2, "wakeAll", new Class[] {}, void.class);

        Lib.checkDerivation(clsRider, clsRiderInterface);

        Lib.checkConstructor(clsRider, new Class[] {});
        Lib.checkMethod(clsRider, "initialize", new Class[] {
                clsRiderControls, aclsInt }, void.class);

        Lib.checkDerivation(clsElevatorController,
                clsElevatorControllerInterface);

        Lib.checkConstructor(clsElevatorController, new Class[] {});
        Lib.checkMethod(clsElevatorController, "initialize",
                new Class[] { clsElevatorControls }, void.class);
    }

    /**
     * Prevent instantiation.
     */
    private Machine() {
    }

    /**
     * Return the hardware interrupt manager.
     *
     * @return the hardware interrupt manager.
     */
    public static Interrupt interrupt() {
        return interrupt;
    }

    /**
     * Return the hardware timer.
     *
     * @return the hardware timer.
     */
    public static Timer timer() {
        return timer;
    }

    /**
     * Return the hardware elevator bank.
     *
     * @return the hardware elevator bank, or <tt>null</tt> if it is not
     * present.
     */
    public static ElevatorBank bank() {
        return bank;
    }

    /**
     * Return the MIPS processor.
     *
     * @return the MIPS processor, or <tt>null</tt> if it is not present.
     */
    public static Processor processor() {
        return processor;
    }

    /**
     * Return the hardware console.
     *
     * @return the hardware console, or <tt>null</tt> if it is not present.
     */
    public static SerialConsole console() {
        return console;
    }

    /**
     * Return the stub filesystem.
     *
     * @return the stub file system, or <tt>null</tt> if it is not present.
     */
    public static FileSystem stubFileSystem() {
        return stubFileSystem;
    }

    /**
     * Return the network link.
     *
     * @return the network link, or <tt>null</tt> if it is not present.
     */
    public static NetworkLink networkLink() {
        return networkLink;
    }

    /**
     * Return the autograder.
     *
     * @return the autograder.
     */
    public static AutoGrader autoGrader() {
        return autoGrader;
    }

    // Simulated hardware devices; each stays null unless enabled in the
    // config file (see createDevices()).
    private static Interrupt interrupt = null;

    private static Timer timer = null;

    private static ElevatorBank bank = null;

    private static Processor processor = null;

    private static SerialConsole console = null;

    private static FileSystem stubFileSystem = null;

    private static NetworkLink networkLink = null;

    private static AutoGrader autoGrader = null;

    // Overridable on the command line with the reserved "--" switch.
    private static String autoGraderClassName = "nachos.ag.AutoGrader";

    /**
     * Return the name of the shell program that a user-programming kernel must
     * run. Make sure <tt>UserKernel.run()</tt> <i>always</i> uses this method
     * to decide which program to run.
     *
     * @return the name of the shell program to run.
     */
    public static String getShellProgramName() {
        // -x takes precedence; otherwise fall back to the config file.
        if (shellProgramName == null)
            shellProgramName = Config.getString("Kernel.shellProgram");

        Lib.assertTrue(shellProgramName != null);
        return shellProgramName;
    }

    private static String shellProgramName = null;

    /**
     * Return the name of the process class that the kernel should use. In the
     * multi-programming project, returns <tt>nachos.userprog.UserProcess</tt>.
     * In the VM project, returns <tt>nachos.vm.VMProcess</tt>. In the
     * networking project, returns <tt>nachos.network.NetProcess</tt>.
     *
     * @return the name of the process class that the kernel should use.
     *
     * @see nachos.userprog.UserKernel#run
     * @see nachos.userprog.UserProcess
     * @see nachos.vm.VMProcess
     * @see nachos.network.NetProcess
     */
    public static String getProcessClassName() {
        if (processClassName == null)
            processClassName = Config.getString("Kernel.processClassName");

        Lib.assertTrue(processClassName != null);
        return processClassName;
    }

    private static String processClassName = null;

    private static NachosSecurityManager securityManager;

    private static Privilege privilege;

    private static String[] args = null;

    private static Stats stats = new Stats();

    // -1 means "use Processor.numPhysPages from the config file".
    private static int numPhysPages = -1;

    private static long randomSeed = 0;

    private static File baseDirectory, nachosDirectory, testDirectory;

    private static String configFileName = "nachos.conf";

    // Usage text printed by the -h switch.
    private static final String help = "\n" + "Options:\n" + "\n"
            + "\t-d <debug flags>\n"
            + "\t\tEnable some debug flags, e.g. -d ti\n" + "\n" + "\t-h\n"
            + "\t\tPrint this help message.\n" + "\n" + "\t-m <pages>\n"
            + "\t\tSpecify how many physical pages of memory to simulate.\n"
            + "\n" + "\t-s <seed>\n"
            + "\t\tSpecify the seed for the random number generator (seed is a\n"
            + "\t\tlong).\n" + "\n" + "\t-x <program>\n"
            + "\t\tSpecify a program that UserKernel.run() should execute,\n"
            + "\t\tinstead of the value of the configuration variable\n"
            + "\t\tKernel.shellProgram\n" + "\n" + "\t-z\n"
            + "\t\tprint the copyright message\n" + "\n"
            + "\t-- <grader class>\n"
            + "\t\tSpecify an autograder class to use, instead of\n"
            + "\t\tnachos.ag.AutoGrader\n" + "\n"
            + "\t-# <grader arguments>\n"
            + "\t\tSpecify the argument string to pass to the autograder.\n"
            + "\n" + "\t-[] <config file>\n"
            + "\t\tSpecifiy a config file to use, instead of nachos.conf\n"
            + "";

    // Copyright notice printed by the -z switch.
    private static final String copyright = "\n"
            + "Copyright 1992-2001 The Regents of the University of California.\n"
            + "All rights reserved.\n"
            + "\n"
            + "Permission to use, copy, modify, and distribute this software and\n"
            + "its documentation for any purpose, without fee, and without\n"
            + "written agreement is hereby granted, provided that the above\n"
            + "copyright notice and the following two paragraphs appear in all\n"
            + "copies of this software.\n"
            + "\n"
            + "IN NO EVENT SHALL THE UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY\n"
            + "PARTY FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL\n"
            + "DAMAGES ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS\n"
            + "DOCUMENTATION, EVEN IF THE UNIVERSITY OF CALIFORNIA HAS BEEN\n"
            + "ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
            + "\n"
            + "THE UNIVERSITY OF CALIFORNIA SPECIFICALLY DISCLAIMS ANY\n"
            + "WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n"
            + "OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE\n"
            + "SOFTWARE PROVIDED HEREUNDER IS ON AN \"AS IS\" BASIS, AND THE\n"
            + "UNIVERSITY OF CALIFORNIA HAS NO OBLIGATION TO PROVIDE\n"
            + "MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.\n";

    // Grants the security manager's privilege object the ability to swap in a
    // different console implementation.
    private static class MachinePrivilege implements
            Privilege.MachinePrivilege {
        public void setConsole(SerialConsole console) {
            Machine.console = console;
        }
    }

    // dummy variables to make javac smarter
    private static Coff dummy1 = null;
}
package io.demor.nuts.lib;

import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.*;
import com.google.common.primitives.Ints;

import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * A {@link ThreadPoolExecutor} that pins tasks sharing the same string tag to a single
 * worker thread ("thread affinity"), so tasks with one tag run sequentially while tasks
 * with different tags may run in parallel.
 *
 * <p>Tagged tasks bypass the normal queue and are stored in a tag-keyed multimap inside
 * {@link ClusterQueue}; untagged tasks go through the ordinary {@link LinkedBlockingQueue}
 * path under the reserved {@code DEFAULT_LOCK} tag.
 *
 * <p>NOTE(review): the hand-rolled wait/notify handshake between {@code offer} and
 * {@code poll}/{@code take} is order-sensitive; see per-method notes below.
 */
public class ClusterExecutor extends ThreadPoolExecutor {

    // Reserved internal tag for untagged tasks; user tags must never equal this value.
    private static final String DEFAULT_LOCK = "CLUSTER";

    // Carries the caller-supplied tag from execute(tag, ...) into ClusterQueue.offer(),
    // which runs on the same (submitting) thread inside super.execute().
    protected final ThreadLocal<String> cAffineThreadLocal = new ThreadLocal<>();

    // The affinity-aware work queue; same object as getQueue(), kept in a typed field.
    protected final ClusterQueue cQueue;

    /**
     * Creates the executor with an affinity-aware queue and its own thread factory.
     * Core threads are pre-started because offer() may target a specific existing thread.
     */
    public ClusterExecutor(final int corePoolSize, final int maximumPoolSize,
                           final long keepAliveTime, final TimeUnit unit,
                           final RejectedExecutionHandler handler) {
        super(corePoolSize, maximumPoolSize, keepAliveTime, unit, new ClusterQueue(), handler);
        cQueue = (ClusterQueue) getQueue();
        final ClusterThreadFactor factory = new ClusterThreadFactor();
        // Let the queue rebalance affinities when a worker thread dies.
        factory.mThreadExitCallback = cQueue;
        setThreadFactory(factory);
        // Share the live thread list and the tag ThreadLocal with the queue.
        cQueue.mAllThreadList = factory.mThreadList;
        cQueue.mAffineThreadLocal = cAffineThreadLocal;
        prestartAllCoreThreads();
    }

    /**
     * Submits {@code command} under the given affinity tag. Tasks with equal tags are
     * funneled to the same worker thread. A null tag behaves like plain execute().
     */
    public void execute(final String tag, final Runnable command) {
        if (tag == null) {
            execute(command);
            cAffineThreadLocal.remove();
        } else {
            cAffineThreadLocal.set(tag);
            try {
                execute(command);
            } finally {
                // Always clear the ThreadLocal so the submitting thread doesn't leak the tag.
                cAffineThreadLocal.remove();
            }
        }
    }

    /** Callback fired by the thread factory's threads when a worker terminates. */
    private interface IThreadExit {

        void onExit(Thread thread);
    }

    @SuppressWarnings("SynchronizationOnLocalVariableOrMethodParameter")
    protected static class ClusterQueue extends LinkedBlockingQueue<Runnable> implements IThreadExit {

        public static final Random RANDOM = new Random();

        static {
            RANDOM.setSeed(System.currentTimeMillis());
        }

        // Pending tagged tasks, keyed by tag (FIFO per tag via linked-list values).
        public Multimap<String, Runnable> mRunnableMap = MultimapBuilder.hashKeys()
                .linkedListValues()
                .build();

        // Which tags are owned by which worker thread.
        public Multimap<Thread, String> mAffineThreadMap = MultimapBuilder.hashKeys()
                .linkedListValues()
                .build();

        // Per-thread "wake-up tickets": tags (or DEFAULT_LOCK) a thread should service next.
        public Multimap<Thread, String> mAffineLockMap = MultimapBuilder.hashKeys()
                .linkedListValues()
                .build();

        // Worker threads currently parked in poll()/take() waiting for work.
        public Set<Thread> mWaitingThreadSet = Sets.newConcurrentHashSet();

        // All live worker threads (shared with ClusterThreadFactor.mThreadList).
        public List<Thread> mAllThreadList;

        // Shared with the owning executor; read on the submitting thread inside offer().
        public ThreadLocal<String> mAffineThreadLocal;

        /**
         * Enqueues a task. Untagged tasks go into the underlying LinkedBlockingQueue and
         * an arbitrary (preferably idle) thread is poked; tagged tasks are stored in
         * mRunnableMap and their owning thread is poked.
         *
         * NOTE(review): the finally block records the wake-up ticket and notifies even
         * when the try block threw; Preconditions.checkNotNull runs *after* the
         * mAffineLockMap.put, so a null affineThread is inserted before the failure.
         */
        @Override
        public synchronized boolean offer(final Runnable runnable) {
            String affine = mAffineThreadLocal.get();
            System.out.println("offer:" + affine);
            Thread affineThread = null;
            try {
                if (Strings.isNullOrEmpty(affine)) {
                    // Untagged: use the reserved tag and the normal blocking-queue path.
                    affine = DEFAULT_LOCK;
                    if (mWaitingThreadSet.isEmpty()) {
                        // No idle worker: pick a random live thread to receive the ticket.
                        affineThread = mAllThreadList.get(RANDOM.nextInt(mAllThreadList.size()));
                    } else {
                        affineThread = mWaitingThreadSet.iterator().next();
                    }
                    return super.offer(runnable);
                } else {
                    if (DEFAULT_LOCK.equals(affine)) {
                        throw new IllegalArgumentException("thread tag cannot be" + DEFAULT_LOCK);
                    }
                    if (mAffineThreadMap.values().contains(affine)) {
                        // Tag already owned: find its owner thread.
                        for (Thread item : mAffineThreadMap.keySet()) {
                            if (mAffineThreadMap.get(item).contains(affine)) {
                                affineThread = item;
                                break;
                            }
                        }
                    } else {
                        // New tag: assign it to the least-loaded thread.
                        affineThread = minAffineCountThread();
                        mAffineThreadMap.put(affineThread, affine);
                    }
                    mRunnableMap.put(affine, runnable);
                    return true;
                }
            } catch (Throwable e) {
                throw new IllegalStateException(e);
            } finally {
                synchronized (this) {
                    mAffineLockMap.put(affineThread, affine);
                }
                Preconditions.checkNotNull(affineThread);
                // Wake the chosen worker, which parks via t.wait() in poll()/take().
                synchronized (affineThread) {
                    affineThread.notifyAll();
                }
            }
        }

        /** Returns the live thread owning the fewest tags (random tie-break by hash order). */
        private Thread minAffineCountThread() {
            final Multiset<Thread> threadMultiset = HashMultiset.create();
            threadMultiset.addAll(mAllThreadList);
            for (Thread t : mAffineThreadMap.keySet()) {
                threadMultiset.add(t, mAffineThreadMap.get(t).size());
            }
            return new Ordering<Multiset.Entry<Thread>>() {
                @Override
                public int compare(final Multiset.Entry<Thread> left,
                                   final Multiset.Entry<Thread> right) {
                    return Ints.compare(left.getCount(), right.getCount());
                }
            }.immutableSortedCopy(threadMultiset.entrySet())
                    .get(0).getElement();
        }

        // Size/emptiness reflect both the blocking queue and the tagged-task multimap.
        @Override
        public synchronized int size() {
            return super.size() + mRunnableMap.size();
        }

        @Override
        public synchronized boolean isEmpty() {
            return super.isEmpty() && mRunnableMap.isEmpty();
        }

        /** Removes a task from either the blocking queue or any tag's pending list. */
        @Override
        public synchronized boolean remove(final Object o) {
            if (o == null) {
                return false;
            }
            if (super.remove(o)) {
                return true;
            } else {
                for (String key : mRunnableMap.keySet()) {
                    if (mRunnableMap.get(key).remove(o)) {
                        return true;
                    }
                }
                return false;
            }
        }

        /**
         * Worker-side timed dequeue. Parks on the worker thread's own monitor until
         * offer() notifies it or the timeout elapses, then re-checks for a ticket.
         *
         * NOTE(review): a notify arriving between pollRunnableFromMap() and t.wait() is
         * missed until the timeout; also wait(0) means "wait forever" if toMillis == 0.
         */
        @Override
        public Runnable poll(final long timeout, final TimeUnit unit) throws InterruptedException {
            final Thread t = Thread.currentThread();
            System.out.println("pool:" + t);
            try {
                mWaitingThreadSet.add(t);
                Runnable result = pollRunnableFromMap();
                if (result != null) {
                    return result;
                }
                synchronized (t) {
                    t.wait(unit.toMillis(timeout), 0);
                }
                return pollRunnableFromMap();
            } finally {
                mWaitingThreadSet.remove(t);
            }
        }

        /**
         * Worker-side blocking dequeue.
         *
         * NOTE(review): may return null after a spurious wake-up, which violates the
         * BlockingQueue.take() contract — ThreadPoolExecutor tolerates this poorly.
         */
        @Override
        public Runnable take() throws InterruptedException {
            final Thread t = Thread.currentThread();
            System.out.println("take:" + t);
            try {
                mWaitingThreadSet.add(t);
                Runnable result = pollRunnableFromMap();
                if (result != null) {
                    return result;
                }
                synchronized (t) {
                    t.wait();
                }
                return pollRunnableFromMap();
            } finally {
                mWaitingThreadSet.remove(t);
            }
        }

        /** Reassigns a dead worker's tags (and outstanding tickets) to the least-loaded threads. */
        @Override
        public synchronized void onExit(final Thread thread) {
            for (String affine : mAffineThreadMap.removeAll(thread)) {
                Thread t = minAffineCountThread();
                mAffineThreadMap.put(t, affine);
                if (mAffineLockMap.get(thread).contains(affine)) {
                    // Transfer the pending wake-up ticket along with ownership.
                    mAffineLockMap.put(t, affine);
                }
            }
            mAffineLockMap.removeAll(thread);
        }

        /**
         * Consumes one wake-up ticket for the current thread: DEFAULT_LOCK tickets pull
         * from the underlying blocking queue, tag tickets pull from mRunnableMap.
         * Returns null when no ticket yields a task.
         */
        protected synchronized Runnable pollRunnableFromMap() throws InterruptedException {
            final Thread t = Thread.currentThread();
            for (String affine : mAffineLockMap.get(t)) {
                if (DEFAULT_LOCK.equals(affine)) {
                    mAffineLockMap.get(t).remove(DEFAULT_LOCK);
                    return super.take();
                }
                Runnable runnable = pollRunnableFromMap();
                if (runnable != null) {
                    mAffineLockMap.get(t).remove(affine);
                    return runnable;
                }
            }
            return null;
        }

        /** Pops the head task for a tag, or null if that tag has no pending tasks. */
        protected Runnable pollAffineRunnable(String affine) {
            if (mRunnableMap.get(affine).isEmpty()) {
                return null;
            }
            final Iterator<Runnable> i = mRunnableMap.get(affine).iterator();
            final Runnable runnable = i.next();
            if (runnable == null) {
                return null;
            }
            i.remove();
            return runnable;
        }
    }

    /** Thread factory that tracks live workers and reports their termination. */
    private class ClusterThreadFactor implements ThreadFactory {

        private final AtomicInteger mCount = new AtomicInteger(0);

        // Shared with ClusterQueue.mAllThreadList; threads remove themselves on exit.
        private ArrayList<Thread> mThreadList = Lists.newArrayList();

        private IThreadExit mThreadExitCallback;

        @Override
        public Thread newThread(final Runnable r) {
            Thread t = new Thread(r, "Nuts Task Thread #" + mCount.getAndIncrement()) {
                @Override
                public void run() {
                    super.run();
                    // Deregister and let the queue rebalance this thread's affinities.
                    mThreadList.remove(this);
                    mThreadExitCallback.onExit(this);
                }
            };
            mThreadList.add(t);
            return t;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.converter.jaxp;

import java.io.IOException;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParserFactory;

import org.xml.sax.ContentHandler;
import org.xml.sax.DTDHandler;
import org.xml.sax.EntityResolver;
import org.xml.sax.ErrorHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXNotRecognizedException;
import org.xml.sax.SAXNotSupportedException;
import org.xml.sax.XMLReader;

/**
 * Manages a pool of XMLReader (and associated SAXParser) instances for reuse.
 */
public class XMLReaderPool {

    private final Queue<WeakReference<XMLReader>> pool = new ConcurrentLinkedQueue<>();
    private final SAXParserFactory saxParserFactory;

    /**
     * Creates a new instance.
     *
     * @param saxParserFactory the SAXParserFactory used to create new SAXParser instances
     */
    public XMLReaderPool(SAXParserFactory saxParserFactory) {
        this.saxParserFactory = saxParserFactory;
    }

    /**
     * Returns an XMLReader that can be used exactly once. Calling one of the {@code parse} methods returns the reader
     * to the pool. This is useful for e.g. SAXSource which bundles an XMLReader with an InputSource that can also be
     * consumed just once.
     *
     * @return the XMLReader
     * @throws SAXException see {@link SAXParserFactory#newSAXParser()}
     * @throws ParserConfigurationException see {@link SAXParserFactory#newSAXParser()}
     */
    public XMLReader createXMLReader() throws SAXException, ParserConfigurationException {
        XMLReader reader = dequeueLiveReader();
        if (reader == null) {
            // nothing reusable in the pool: build a fresh parser
            reader = saxParserFactory.newSAXParser().getXMLReader();
        }
        return new OneTimeXMLReader(reader);
    }

    /**
     * Drains cleared weak references and returns the first still-live pooled reader,
     * or {@code null} when the pool holds none.
     */
    private XMLReader dequeueLiveReader() {
        for (WeakReference<XMLReader> ref = pool.poll(); ref != null; ref = pool.poll()) {
            XMLReader cached = ref.get();
            if (cached != null) {
                return cached;
            }
        }
        return null;
    }

    /**
     * Wraps another XMLReader for single use only.
     */
    private final class OneTimeXMLReader implements XMLReader {

        private final XMLReader xmlReader;
        // first-seen values of any feature/property the caller changed, so they can be undone
        private final Map<String, Boolean> initFeatures = new HashMap<>();
        private final Map<String, Object> initProperties = new HashMap<>();
        // handler configuration captured at wrap time, restored on release
        private final ContentHandler initContentHandler;
        private final DTDHandler initDtdHandler;
        private final EntityResolver initEntityResolver;
        private final ErrorHandler initErrorHandler;
        private boolean readerInvalid;

        private OneTimeXMLReader(XMLReader xmlReader) {
            this.xmlReader = xmlReader;
            this.initContentHandler = xmlReader.getContentHandler();
            this.initDtdHandler = xmlReader.getDTDHandler();
            this.initEntityResolver = xmlReader.getEntityResolver();
            this.initErrorHandler = xmlReader.getErrorHandler();
        }

        /** Restores the wrapped reader's initial state, recycles it, and invalidates this wrapper. */
        private void release() {
            try {
                undoFeatureAndPropertyChanges();
                xmlReader.setContentHandler(initContentHandler);
                xmlReader.setDTDHandler(initDtdHandler);
                xmlReader.setEntityResolver(initEntityResolver);
                xmlReader.setErrorHandler(initErrorHandler);

                // return the wrapped instance to the pool
                pool.offer(new WeakReference<>(xmlReader));
            } finally {
                readerInvalid = true;
            }
        }

        /** Best-effort rollback of every feature/property the caller modified. */
        private void undoFeatureAndPropertyChanges() {
            for (Map.Entry<String, Boolean> feature : initFeatures.entrySet()) {
                try {
                    xmlReader.setFeature(feature.getKey(), feature.getValue().booleanValue());
                } catch (Exception ignored) {
                    // the parser may refuse to reset this feature; safe to skip
                }
            }
            for (Map.Entry<String, Object> property : initProperties.entrySet()) {
                try {
                    xmlReader.setProperty(property.getKey(), property.getValue());
                } catch (Exception ignored) {
                    // the parser may refuse to reset this property; safe to skip
                }
            }
        }

        @Override
        public boolean getFeature(String name) throws SAXNotRecognizedException, SAXNotSupportedException {
            return xmlReader.getFeature(name);
        }

        @Override
        public void setFeature(String name, boolean value) throws SAXNotRecognizedException, SAXNotSupportedException {
            if (readerInvalid) {
                return;
            }
            // remember the original value once so release() can restore it
            if (!initFeatures.containsKey(name)) {
                initFeatures.put(name, Boolean.valueOf(xmlReader.getFeature(name)));
            }
            xmlReader.setFeature(name, value);
        }

        @Override
        public Object getProperty(String name) throws SAXNotRecognizedException, SAXNotSupportedException {
            return xmlReader.getProperty(name);
        }

        @Override
        public void setProperty(String name, Object value) throws SAXNotRecognizedException, SAXNotSupportedException {
            if (readerInvalid) {
                return;
            }
            // remember the original value once so release() can restore it
            if (!initProperties.containsKey(name)) {
                initProperties.put(name, xmlReader.getProperty(name));
            }
            xmlReader.setProperty(name, value);
        }

        @Override
        public ContentHandler getContentHandler() {
            return xmlReader.getContentHandler();
        }

        @Override
        public void setContentHandler(ContentHandler handler) {
            if (readerInvalid) {
                return;
            }
            xmlReader.setContentHandler(handler);
        }

        @Override
        public DTDHandler getDTDHandler() {
            return xmlReader.getDTDHandler();
        }

        @Override
        public void setDTDHandler(DTDHandler handler) {
            if (readerInvalid) {
                return;
            }
            xmlReader.setDTDHandler(handler);
        }

        @Override
        public EntityResolver getEntityResolver() {
            return xmlReader.getEntityResolver();
        }

        @Override
        public void setEntityResolver(EntityResolver resolver) {
            if (readerInvalid) {
                return;
            }
            xmlReader.setEntityResolver(resolver);
        }

        @Override
        public ErrorHandler getErrorHandler() {
            return xmlReader.getErrorHandler();
        }

        @Override
        public void setErrorHandler(ErrorHandler handler) {
            if (readerInvalid) {
                return;
            }
            xmlReader.setErrorHandler(handler);
        }

        @Override
        public synchronized void parse(InputSource input) throws IOException, SAXException {
            checkValid();
            try {
                xmlReader.parse(input);
            } finally {
                // parsing consumes this wrapper; recycle the underlying reader
                release();
            }
        }

        @Override
        public synchronized void parse(String systemId) throws IOException, SAXException {
            checkValid();
            try {
                xmlReader.parse(systemId);
            } finally {
                release();
            }
        }

        private void checkValid() {
            if (readerInvalid) {
                throw new IllegalStateException("OneTimeXMLReader can only be used once!");
            }
        }
    }
}
package biz.aQute.remote;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import org.osgi.framework.dto.BundleDTO;
import org.osgi.framework.launch.Framework;
import org.osgi.framework.wiring.dto.BundleRevisionDTO;

import aQute.bnd.osgi.Builder;
import aQute.bnd.osgi.Jar;
import aQute.bnd.version.Version;
import aQute.lib.io.IO;
import aQute.remote.api.Agent;
import aQute.remote.plugin.LauncherSupervisor;
import junit.framework.TestCase;

/**
 * Integration tests for the bnd remote agent: boots an embedded Felix framework,
 * installs the agent bundle into it, then drives the agent over a local
 * {@link LauncherSupervisor} connection (install/update/start/stop, stdio
 * redirection, bundle ordering, revision DTOs).
 *
 * NOTE(review): relies on pre-built jars under generated/ and on
 * Agent.DEFAULT_PORT being free — this is an environment-bound test, not a unit test.
 */
public class RemoteTest extends TestCase {
    // Counter baked into generated bundles so each build differs (forces update).
    private int random;
    private HashMap<String, Object> configuration;
    private Framework framework;
    private BundleContext context;
    private Bundle agent;
    private String location;
    // Scratch directory, unique per test method.
    private File tmp;

    private String getTestName() {
        return getClass().getName() + "/" + getName();
    }

    /**
     * Boots a clean Felix framework in a per-test storage directory and installs +
     * starts the remote agent bundle from generated/.
     */
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        tmp = IO.getFile("generated/tmp/test/" + getTestName());
        IO.delete(tmp);
        IO.mkdirs(tmp);
        configuration = new HashMap<>();
        configuration.put(Constants.FRAMEWORK_STORAGE_CLEAN,
            Constants.FRAMEWORK_STORAGE_CLEAN_ONFIRSTINIT);
        configuration.put(Constants.FRAMEWORK_STORAGE, new File(tmp, "fwstorage").getAbsolutePath());
        // The agent needs the launch package exported by the system bundle.
        configuration.put(Constants.FRAMEWORK_SYSTEMPACKAGES_EXTRA,
            "org.osgi.framework.launch;version=1.2");
        framework = new org.apache.felix.framework.FrameworkFactory().newFramework(configuration);
        framework.init();
        framework.start();
        context = framework.getBundleContext();
        location = "reference:" + IO.getFile("generated/biz.aQute.remote.agent.jar")
            .toURI()
            .toString();
        agent = context.installBundle(location);
        agent.start();
    }

    @Override
    protected void tearDown() throws Exception {
        framework.stop();
        framework.waitForStop(10000);
        super.tearDown();
    }

    /**
     * End-to-end smoke test: install a bundle through the agent, start/stop it, and
     * verify stdout capture and stdin forwarding through the supervisor.
     */
    public void testSimple() throws Exception {
        LauncherSupervisor supervisor = new LauncherSupervisor();
        supervisor.connect("localhost", Agent.DEFAULT_PORT);
        assertNotNull(supervisor);

        // NOTE(review): shadows the 'agent' field (which holds the agent Bundle).
        Agent agent = supervisor.getAgent();
        assertNotNull(agent.getFramework());

        // Create stdin/stderr buffers
        // and redirect output
        StringBuffer stderr = new StringBuffer();
        StringBuffer stdout = new StringBuffer();
        supervisor.setStderr(stderr);
        supervisor.setStdout(stdout);
        supervisor.redirect(1);

        //
        // Install the bundle systemio
        //
        File f = IO.getFile("generated/biz.aQute.remote.test.systemio.jar");
        String sha = supervisor.addFile(f);
        BundleDTO bundle = agent.install(f.getAbsolutePath(), sha);

        //
        // Start the bundle and capture the output
        //
        String result = agent.start(bundle.id);
        assertNull(result, result);
        // Give the redirected output time to arrive.
        Thread.sleep(1000);
        assertEquals("Hello World", stdout.toString()
            .trim());
        stdout.setLength(0);

        // Send input (will be consumed by the Activator.stop
        ByteArrayInputStream bin = new ByteArrayInputStream(new String("Input\n").getBytes());
        supervisor.setStdin(bin);

        // stop the bundle (will return input as uppercase)
        result = agent.stop(bundle.id);
        assertNull(result, result);
        Thread.sleep(1000);
        assertEquals("INPUT", stdout.toString()
            .trim());
    }

    /**
     * Exercises Agent.update(Map) as a declarative "desired state": adding,
     * replacing (version bump), removing, and finally clearing bundles.
     */
    public void testUpdate() throws Exception {
        LauncherSupervisor supervisor = new LauncherSupervisor();
        supervisor.connect("localhost", Agent.DEFAULT_PORT);
        File t1 = create("bsn-1", new Version(1, 0, 0));
        File t2 = create("bsn-2", new Version(1, 0, 0));
        assertTrue(t1.isFile());
        assertTrue(t2.isFile());
        String sha1 = supervisor.addFile(t1);
        String sha2 = supervisor.addFile(t2);
        Map<String, String> update = new HashMap<>();
        update.put(t1.getAbsolutePath(), sha1);
        String errors = supervisor.getAgent()
            .update(update);
        assertNull(errors);

        //
        // Verify that t1 is installed and t2 not
        //
        Bundle b1 = context.getBundle(t1.getAbsolutePath());
        assertNotNull(b1);
        Bundle b2 = context.getBundle(t2.getAbsolutePath());
        assertNull(b2);

        //
        // Now add a new one
        //
        update = new HashMap<>();
        update.put(t1.getAbsolutePath(), sha1);
        update.put(t2.getAbsolutePath(), sha2);
        errors = supervisor.getAgent()
            .update(update);
        assertNull(errors);
        assertNotNull(context.getBundle(t1.getAbsolutePath()));
        assertNotNull(context.getBundle(t2.getAbsolutePath()));

        //
        // Now change a bundle
        //
        t1 = create("bsn-1", new Version(2, 0, 0));
        sha1 = supervisor.addFile(t1);
        update = new HashMap<>();
        update.put(t1.getAbsolutePath(), sha1);
        update.put(t2.getAbsolutePath(), sha2);
        errors = supervisor.getAgent()
            .update(update);
        assertNull(errors);
        b1 = context.getBundle(t1.getAbsolutePath());
        assertNotNull(b1);
        b2 = context.getBundle(t2.getAbsolutePath());
        assertNotNull(b2);
        assertEquals(new Version(2, 0, 0).toString(), b1.getVersion()
            .toString());
        assertEquals(Bundle.ACTIVE, b1.getState());
        assertEquals(Bundle.ACTIVE, b2.getState());

        //
        // Now delete t1
        //
        update = new HashMap<>();
        update.put(t2.getAbsolutePath(), sha2);
        errors = supervisor.getAgent()
            .update(update);
        assertNull(errors);
        assertNull(context.getBundle(t1.getAbsolutePath()));
        assertNotNull(context.getBundle(t2.getAbsolutePath()));

        //
        // Delete all
        //
        supervisor.getAgent()
            .update(null);
        assertNull(context.getBundle(t1.getAbsolutePath()));
        assertNull(context.getBundle(t2.getAbsolutePath()));
    }

    /**
     * Verifies that bundles are installed in the iteration order of the update map
     * (hence the LinkedHashMap). The first two installed bundles are the system
     * bundle and the agent, so checking starts at index 2.
     */
    public void testUpdateOrder() throws Exception {
        LauncherSupervisor supervisor = new LauncherSupervisor();
        supervisor.connect("localhost", Agent.DEFAULT_PORT);
        List<String> bundles = new ArrayList<>();
        LinkedHashMap<String, String> update = new LinkedHashMap<>();
        for (int i = 0; i < 50; i++) {
            String name = UUID.randomUUID()
                .toString();
            File f = create(name, new Version(1, 0, 0));
            assertTrue(f.isFile());
            String sha = supervisor.addFile(f);
            update.put(f.getAbsolutePath(), sha);
            bundles.add(name);
        }
        String errors = supervisor.getAgent()
            .update(update);
        assertNull(errors);

        //
        // Now check installed bundle order
        //
        Bundle[] installed = context.getBundles();
        for (int i = 2; i < installed.length; i++) {
            Bundle b = installed[i];
            assertTrue(b.getLocation()
                .endsWith(bundles.get(i - 2) + "-1.0.0.jar"));
        }

        // delete
        supervisor.getAgent()
            .update(null);
    }

    /**
     * Builds a tiny resource-only test bundle with the given symbolic name and
     * version; each build embeds an incrementing "Random" header so contents differ.
     */
    private File create(String bsn, Version v) throws Exception {
        String name = bsn + "-" + v;
        Builder b = new Builder();
        b.setBundleSymbolicName(bsn);
        b.setBundleVersion(v);
        b.setProperty("Random", random++ + "");
        b.setProperty("-resourceonly", true + "");
        b.setIncludeResource("foo;literal='foo'");
        Jar jar = b.build();
        assertTrue(b.check());
        File file = IO.getFile(tmp, name + ".jar");
        file.getParentFile()
            .mkdirs();
        // Keep the jar timestamp current so update logic sees it as fresh.
        jar.updateModified(System.currentTimeMillis(), "Force it to now");
        jar.write(file);
        b.close();
        return file;
    }

    /*
     * Test if we can get the BundleRevisionDTOs
     */
    public void testBRD() throws Exception {
        LauncherSupervisor supervisor = new LauncherSupervisor();
        supervisor.connect("localhost", Agent.DEFAULT_PORT);
        assertNotNull(supervisor);
        Agent agent = supervisor.getAgent();
        assertNotNull(agent);
        List<BundleRevisionDTO> bundleRevisons = agent.getBundleRevisons();
        assertNotNull(bundleRevisons);
    }
}
/*
 * Copyright 2018 University of Michigan
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.verdictdb.jdbc41;

import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;

import org.verdictdb.VerdictSingleResult;
import org.verdictdb.commons.DataTypeConverter;

/**
 * JDBC 4.1 {@link ResultSetMetaData} backed by a {@link VerdictSingleResult}.
 *
 * <p>All column arguments follow the JDBC convention of being 1-based; the
 * underlying {@code VerdictSingleResult} is 0-based, hence the {@code column - 1}
 * translation throughout. Operations with no sensible mapping throw
 * {@link SQLException}.
 */
public class VerdictResultSetMetaData implements ResultSetMetaData {

  private VerdictSingleResult queryResult;

  // Type-name fragments treated as case-sensitive character data.
  List<String> caseSensitiveColumnTypes = Arrays.asList("char", "string", "text");

  // Type-name fragments treated as signed numeric data.
  List<String> signedColumnTypes = Arrays.asList("double", "int", "real");

  public VerdictResultSetMetaData(VerdictSingleResult queryResult) {
    this.queryResult = queryResult;
  }

  /** Builds the standard "unsupported operation" exception used by this class. */
  private static SQLException unsupported() {
    return new SQLException("Not supported function.");
  }

  /** Resolves the lower-level type name of the given 1-based column. */
  private String typeNameOf(int column) throws SQLException {
    return DataTypeConverter.typeName(queryResult.getColumnType(column - 1));
  }

  @Override
  public <T> T unwrap(Class<T> iface) throws SQLException {
    throw unsupported();
  }

  @Override
  public boolean isWrapperFor(Class<?> iface) throws SQLException {
    throw unsupported();
  }

  @Override
  public int getColumnCount() throws SQLException {
    return queryResult.getColumnCount();
  }

  @Override
  public boolean isAutoIncrement(int column) throws SQLException {
    return queryResult.getMetaData().isAutoIncrement.get(column - 1);
  }

  @Override
  public boolean isCaseSensitive(int column) throws SQLException {
    // char/string/text style columns are considered case sensitive
    String typeName = typeNameOf(column);
    return caseSensitiveColumnTypes.stream().anyMatch(typeName::contains);
  }

  @Override
  public boolean isSearchable(int column) throws SQLException {
    return false;
  }

  @Override
  public boolean isCurrency(int column) throws SQLException {
    return queryResult.getMetaData().isCurrency.get(column - 1);
  }

  @Override
  public int isNullable(int column) throws SQLException {
    return queryResult.getMetaData().isNullable.get(column - 1);
  }

  @Override
  public boolean isSigned(int column) throws SQLException {
    // double/int/real style columns are considered signed
    String typeName = typeNameOf(column);
    return signedColumnTypes.stream().anyMatch(typeName::contains);
  }

  @Override
  public int getColumnDisplaySize(int column) throws SQLException {
    return queryResult.getMetaData().columnDisplaySize.get(column - 1);
  }

  @Override
  public String getColumnLabel(int column) throws SQLException {
    // labels are not tracked separately from names
    return queryResult.getColumnName(column - 1);
  }

  @Override
  public String getColumnName(int column) throws SQLException {
    return queryResult.getColumnName(column - 1);
  }

  @Override
  public String getSchemaName(int column) throws SQLException {
    throw unsupported();
  }

  @Override
  public int getPrecision(int column) throws SQLException {
    return queryResult.getMetaData().precision.get(column - 1);
  }

  @Override
  public int getScale(int column) throws SQLException {
    return queryResult.getMetaData().scale.get(column - 1);
  }

  @Override
  public String getTableName(int column) throws SQLException {
    throw unsupported();
  }

  @Override
  public String getCatalogName(int column) throws SQLException {
    throw unsupported();
  }

  @Override
  public int getColumnType(int column) throws SQLException {
    return queryResult.getColumnType(column - 1);
  }

  @Override
  public String getColumnTypeName(int column) throws SQLException {
    return typeNameOf(column);
  }

  @Override
  public boolean isReadOnly(int column) throws SQLException {
    // verdict results are snapshots; never writable
    return true;
  }

  @Override
  public boolean isWritable(int column) throws SQLException {
    return false;
  }

  @Override
  public boolean isDefinitelyWritable(int column) throws SQLException {
    return false;
  }

  @Override
  public String getColumnClassName(int column) throws SQLException {
    return queryResult.getMetaData().columnClassName.get(column - 1);
  }
}
/*
 * Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.carbon.identity.core.dao;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.base.IdentityException;
import org.wso2.carbon.identity.core.IdentityRegistryResources;
import org.wso2.carbon.identity.core.model.OpenIDUserRPDO;
import org.wso2.carbon.registry.core.Association;
import org.wso2.carbon.registry.core.Collection;
import org.wso2.carbon.registry.core.Registry;
import org.wso2.carbon.registry.core.RegistryConstants;
import org.wso2.carbon.registry.core.Resource;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.jdbc.utils.Transaction;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;

/**
 * Registry-backed DAO for OpenID relying-party (RP) settings per user. RP entries
 * live under OPENID_USER_RP_ROOT and are linked to the owning user's profile
 * collection via registry associations.
 *
 * NOTE(review): SimpleDateFormat instances are created per call (they are not
 * thread-safe, so this per-call usage is deliberate).
 */
public class OpenIDUserRPDAO extends AbstractDAO<OpenIDUserRPDO> {

    protected Log log = LogFactory.getLog(OpenIDUserRPDAO.class);

    /**
     * @param registry backing registry instance (stored on the AbstractDAO field)
     */
    public OpenIDUserRPDAO(Registry registry) {
        this.registry = registry;
    }

    /**
     * Creates a Relying Party and associates it with the User. A no-op if an RP
     * with the same UUID already exists. Joins an ambient registry transaction if
     * one is active; otherwise manages its own begin/commit/rollback.
     *
     * @param oprp relying-party settings to persist
     * @throws IdentityException on any registry failure
     */
    public void create(OpenIDUserRPDO oprp) throws IdentityException {
        String path = null;
        Resource resource = null;
        Collection userResource = null;

        if (log.isDebugEnabled()) {
            log.debug("Creating an OpenID user relying party");
        }

        try {
            path = IdentityRegistryResources.OPENID_USER_RP_ROOT + oprp.getUuid();

            if (registry.resourceExists(path)) {
                log.info("OpenID user RP trying to create already exists");
                return;
            }

            /*
             * rp = getFirstObjectWithPropertyValue(IdentityRegistryResources.
             * OPENID_USER_RP_ROOT,
             * IdentityRegistryResources.PROP_RP_URL, oprp.getRpUrl());
             *
             * if (rp != null) {
             * log.info("OpenID user RP trying to create already exists");
             * return;
             * }
             */

            // Map the DTO onto registry resource properties.
            resource = registry.newResource();
            resource.setProperty(IdentityRegistryResources.PROP_RP_URL, oprp.getRpUrl());
            resource.setProperty(IdentityRegistryResources.PROP_IS_TRUSTED_ALWAYS,
                    Boolean.toString(oprp.isTrustedAlways()));
            resource.setProperty(IdentityRegistryResources.PROP_VISIT_COUNT,
                    Integer.toString(oprp.getVisitCount()));
            resource.setProperty(IdentityRegistryResources.PROP_LAST_VISIT,
                    new SimpleDateFormat("yyyy/MM/dd HH:mm:ss").format(oprp.getLastVisit()));
            resource.setProperty(IdentityRegistryResources.PROP_DEFAULT_PROFILE_NAME,
                    oprp.getDefaultProfileName());
            resource.setProperty(IdentityRegistryResources.PROP_USER_ID, oprp.getUserName());

            // Only open/close a transaction if the caller hasn't already started one.
            boolean transactionStarted = Transaction.isStarted();
            try {
                if (!transactionStarted) {
                    registry.beginTransaction();
                }

                registry.put(path, resource);

                // Ensure the user's profile collection exists before associating.
                if (!registry.resourceExists(RegistryConstants.PROFILES_PATH + oprp.getUserName())) {
                    userResource = registry.newCollection();
                    registry.put(RegistryConstants.PROFILES_PATH + oprp.getUserName(), userResource);
                } else {
                    userResource =
                            (Collection) registry.get(RegistryConstants.PROFILES_PATH + oprp.getUserName());
                }

                registry.addAssociation(RegistryConstants.PROFILES_PATH + oprp.getUserName(), path,
                        IdentityRegistryResources.ASSOCIATION_USER_OPENID_RP);

                if (!transactionStarted) {
                    registry.commitTransaction();
                }
            } catch (Exception e) {
                if (!transactionStarted) {
                    registry.rollbackTransaction();
                }
                // Preserve RegistryException type for the outer handler; wrap anything else.
                if (e instanceof RegistryException) {
                    throw (RegistryException) e;
                } else {
                    throw new IdentityException(
                            "Error occured while creating an OpenID user relying party", e);
                }
            }
        } catch (RegistryException e) {
            log.error("Error occured while creating an OpenID user relying party", e);
            throw new IdentityException(
                    "Error occured while creating an OpenID user relying party", e);
        }
    }

    /**
     * Updates the Relying Party if exists, if not, then creates a new Relying
     * Party.
     *
     * NOTE(review): despite the javadoc, the implementation returns without
     * creating anything when the RP does not exist.
     *
     * @param oprp relying-party settings to persist
     * @throws IdentityException on any registry failure
     */
    public void update(OpenIDUserRPDO oprp) throws IdentityException {
        String path = null;
        Resource resource = null;

        if (log.isDebugEnabled()) {
            log.debug("Updating an OpenID user relying party");
        }

        try {
            path = IdentityRegistryResources.OPENID_USER_RP_ROOT + oprp.getUuid();

            if (!registry.resourceExists(path)) {
                log.info("OpenID user RP trying to update does not exist");
                return;
            }

            /*
             * rp =
             * getFirstObjectWithPropertyValue(IdentityRegistryResources.
             * OPENID_USER_RP_ROOT,
             * IdentityRegistryResources.PROP_RP_URL,
             * oprp.getRpUrl());
             *
             * if (rp == null) {
             * log.info("OpenID user RP trying to update does not exist");
             * return;
             * }
             */

            // Load the existing resource and overwrite all mapped properties.
            resource = registry.get(path);
            resource.setProperty(IdentityRegistryResources.PROP_RP_URL, oprp.getRpUrl());
            resource.setProperty(IdentityRegistryResources.PROP_IS_TRUSTED_ALWAYS,
                    Boolean.toString(oprp.isTrustedAlways()));
            resource.setProperty(IdentityRegistryResources.PROP_VISIT_COUNT,
                    Integer.toString(oprp.getVisitCount()));
            resource.setProperty(IdentityRegistryResources.PROP_LAST_VISIT,
                    new SimpleDateFormat("yyyy/MM/dd HH:mm:ss").format(oprp.getLastVisit()));
            resource.setProperty(IdentityRegistryResources.PROP_DEFAULT_PROFILE_NAME,
                    oprp.getDefaultProfileName());
            resource.setProperty(IdentityRegistryResources.PROP_USER_ID, oprp.getUserName());
            registry.put(path, resource);
        } catch (RegistryException e) {
            log.error("Error occured while updating an OpenID user relying party", e);
            throw new IdentityException(
                    "Error occured while updating an OpenID user relying party", e);
        }
    }

    /**
     * Deletes the given relying party entry and its user association. Joins an
     * ambient registry transaction if one is active.
     *
     * @param oprp relying party to delete (identified by UUID and user name)
     * @throws IdentityException on any registry failure
     */
    public void delete(OpenIDUserRPDO oprp) throws IdentityException {
        String path = null;

        if (log.isDebugEnabled()) {
            log.debug("Deleting an OpenID user relying party");
        }

        try {
            path = IdentityRegistryResources.OPENID_USER_RP_ROOT + oprp.getUuid();
            boolean transactionStarted = Transaction.isStarted();
            try {
                if (!transactionStarted) {
                    registry.beginTransaction();
                }

                // Remove the user->RP association first, then the RP resource itself.
                registry.removeAssociation(RegistryConstants.PROFILES_PATH + oprp.getUserName(),
                        path, IdentityRegistryResources.ASSOCIATION_USER_OPENID_RP);
                registry.delete(path);

                if (!transactionStarted) {
                    registry.commitTransaction();
                }
            } catch (Exception e) {
                if (!transactionStarted) {
                    registry.rollbackTransaction();
                }
                if (e instanceof RegistryException) {
                    throw (RegistryException) e;
                } else {
                    throw new IdentityException(
                            "Error occured while deleting an OpenID user relying party", e);
                }
            }
        } catch (RegistryException e) {
            log.error("Error occured while deleting an OpenID user relying party", e);
            throw new IdentityException(
                    "Error occured while deleting an OpenID user relying party", e);
        }
    }

    /**
     * Returns relying party user settings corresponding to a given user name.
     *
     * @param userName Unique user name
     * @param rpUrl    Relying party url
     * @return the matching OpenIDUserRPDO, or null when the user has no profile or
     *         no RP with that url
     * @throws IdentityException on any registry failure
     */
    public OpenIDUserRPDO getOpenIDUserRP(String userName, String rpUrl) throws IdentityException {
        OpenIDUserRPDO rp = null;
        Association[] assoc = null;

        if (log.isDebugEnabled()) {
            log.debug("Retreiving OpenID user relying party");
        }

        try {
            if (registry.resourceExists(RegistryConstants.PROFILES_PATH + userName)) {
                // Walk the user's RP associations and match by RP url.
                assoc = registry.getAssociations(RegistryConstants.PROFILES_PATH + userName,
                        IdentityRegistryResources.ASSOCIATION_USER_OPENID_RP);
                for (Association association : assoc) {
                    rp = resourceToObject(registry.get(association.getDestinationPath()));
                    if (rp.getRpUrl().equals(rpUrl)) {
                        return rp;
                    }
                }
            }
        } catch (RegistryException e) {
            log.error("Error occured while retreiving OpenID user relying party", e);
            throw new IdentityException("Error occured while retreiving OpenID user relying party", e);
        }
        // NOTE(review): falls through with the LAST inspected RP (not null) when no
        // url matched, and null when there were no associations at all.
        return rp;
    }

    /**
     * Returns every relying-party entry stored under OPENID_USER_RP_ROOT,
     * regardless of owning user.
     *
     * @return all RP entries (possibly empty)
     * @throws IdentityException on any registry failure
     */
    public OpenIDUserRPDO[] getAllOpenIDUserRP() throws IdentityException {
        List<OpenIDUserRPDO> rpdos = null;
        Collection rps = null;
        String[] children = null;

        if (log.isDebugEnabled()) {
            log.debug("Retrieving all OP RPs");
        }

        try {
            rps = (Collection) registry.get(IdentityRegistryResources.OPENID_USER_RP_ROOT);
            rpdos = new ArrayList<OpenIDUserRPDO>();
            if (rps != null && rps.getChildCount() > 0) {
                children = rps.getChildren();
                for (String child : children) {
                    rpdos.add(resourceToObject(registry.get(child)));
                }
            }
        } catch (RegistryException e) {
            log.error("Error occured while retreiving all OP RPs", e);
            throw new IdentityException("Error occured while retreiving all OP RPs", e);
        }
        return rpdos.toArray(new OpenIDUserRPDO[rpdos.size()]);
    }

    /**
     * Returns all relying parties associated with a given user name.
     *
     * @param userName Unique user name
     * @return the user's RP entries (possibly empty), each with userName set
     * @throws IdentityException on any registry failure
     */
    public OpenIDUserRPDO[] getOpenIDUserRPs(String userName) throws IdentityException {
        List<OpenIDUserRPDO> lst = null;
        Association[] assoc = null;
        OpenIDUserRPDO rp = null;

        if (log.isDebugEnabled()) {
            log.debug("Retreiving OpenID user relying parties");
        }

        try {
            lst = new ArrayList<OpenIDUserRPDO>();
            if (registry.resourceExists(RegistryConstants.PROFILES_PATH + userName)) {
                assoc = registry.getAssociations(RegistryConstants.PROFILES_PATH + userName,
                        IdentityRegistryResources.ASSOCIATION_USER_OPENID_RP);
                for (Association association : assoc) {
                    rp = resourceToObject(registry.get(association.getDestinationPath()));
                    rp.setUserName(userName);
                    lst.add(rp);
                }
            }
        } catch (RegistryException e) {
            log.error("Error occured while retreiving OpenID user relying parties", e);
            throw new IdentityException(
                    "Error occured while retreiving OpenID user relying parties", e);
        }
        return lst.toArray(new OpenIDUserRPDO[lst.size()]);
    }

    /**
     * Returns the default user profile corresponding to the given user name and
     * the RP URL.
     *
     * NOTE(review): throws NullPointerException when no matching RP exists,
     * because getOpenIDUserRP may return null — confirm callers guarantee
     * existence.
     *
     * @param userName Unique user name
     * @param rpUrl    Relying party URL
     * @return Default user profile
     * @throws IdentityException on any registry failure
     */
    public String getOpenIDDefaultUserProfile(String userName, String rpUrl)
            throws IdentityException {
        OpenIDUserRPDO oprp = null;

        if (log.isDebugEnabled()) {
            log.debug("Retreiving OpenID default user profile for user " + userName);
        }

        oprp = getOpenIDUserRP(userName, rpUrl);
        return oprp.getDefaultProfileName();
    }

    /**
     * Returns user name, number of total visits, last login time and OpenID, of
     * all the users who at
     * least used his OpenID once.
* * @return user data */ protected OpenIDUserRPDO resourceToObject(Resource resource) { OpenIDUserRPDO rp = null; if (resource != null) { rp = new OpenIDUserRPDO(); String path = resource.getPath(); String[] values = path.split("/"); String uuid = values[values.length - 1]; rp.setUuid(uuid); rp.setRpUrl(resource.getProperty(IdentityRegistryResources.PROP_RP_URL)); rp.setTrustedAlways(Boolean.parseBoolean(resource.getProperty(IdentityRegistryResources.PROP_IS_TRUSTED_ALWAYS))); rp.setVisitCount(Integer.parseInt(resource.getProperty(IdentityRegistryResources.PROP_VISIT_COUNT))); try { rp.setLastVisit(new SimpleDateFormat("yyyy/MM/dd HH:mm:ss").parse(resource.getProperty(IdentityRegistryResources.PROP_LAST_VISIT))); } catch (ParseException e) { if (log.isDebugEnabled()) { log.error("Error while parsing resourceToObject", e); } } rp.setDefaultProfileName(resource.getProperty(IdentityRegistryResources.PROP_DEFAULT_PROFILE_NAME)); rp.setUserName(resource.getProperty(IdentityRegistryResources.PROP_USER_ID)); } return rp; } }
/** * Generated with Acceleo */ package com.github.lbroudoux.dsl.eip.parts.impl; // Start of user code for imports import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.eclipse.emf.common.util.Enumerator; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.EReference; import org.eclipse.emf.edit.ui.provider.AdapterFactoryLabelProvider; import org.eclipse.emf.eef.runtime.EEFRuntimePlugin; import org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent; import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent; import org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart; import org.eclipse.emf.eef.runtime.context.impl.EObjectPropertiesEditionContext; import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent; import org.eclipse.emf.eef.runtime.impl.parts.CompositePropertiesEditionPart; import org.eclipse.emf.eef.runtime.policies.PropertiesEditingPolicy; import org.eclipse.emf.eef.runtime.providers.PropertiesEditingProvider; import org.eclipse.emf.eef.runtime.ui.parts.PartComposer; import org.eclipse.emf.eef.runtime.ui.parts.sequence.BindingCompositionSequence; import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionSequence; import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionStep; import org.eclipse.emf.eef.runtime.ui.utils.EditingUtils; import org.eclipse.emf.eef.runtime.ui.widgets.EMFComboViewer; import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable; import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable.ReferencesTableListener; import org.eclipse.emf.eef.runtime.ui.widgets.SWTUtils; import org.eclipse.emf.eef.runtime.ui.widgets.TabElementTreeSelectionDialog; import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableContentProvider; import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings; import org.eclipse.jface.viewers.ArrayContentProvider; import 
org.eclipse.jface.viewers.ISelectionChangedListener; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.jface.viewers.StructuredSelection; import org.eclipse.jface.viewers.ViewerFilter; import org.eclipse.swt.SWT; import org.eclipse.swt.events.FocusAdapter; import org.eclipse.swt.events.FocusEvent; import org.eclipse.swt.events.KeyAdapter; import org.eclipse.swt.events.KeyEvent; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.Text; import com.github.lbroudoux.dsl.eip.parts.EipViewsRepository; import com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart; import com.github.lbroudoux.dsl.eip.providers.EipMessages; // End of user code /** * @author yanngv29 * */ public class EnricherPropertiesEditionPartImpl extends CompositePropertiesEditionPart implements ISWTPropertiesEditionPart, EnricherPropertiesEditionPart { protected Text name; protected ReferencesTable toChannels; protected List<ViewerFilter> toChannelsBusinessFilters = new ArrayList<ViewerFilter>(); protected List<ViewerFilter> toChannelsFilters = new ArrayList<ViewerFilter>(); protected ReferencesTable fromChannels; protected List<ViewerFilter> fromChannelsBusinessFilters = new ArrayList<ViewerFilter>(); protected List<ViewerFilter> fromChannelsFilters = new ArrayList<ViewerFilter>(); protected ReferencesTable ownedServiceInvocations; protected List<ViewerFilter> ownedServiceInvocationsBusinessFilters = new ArrayList<ViewerFilter>(); protected List<ViewerFilter> ownedServiceInvocationsFilters = new ArrayList<ViewerFilter>(); protected EMFComboViewer part; /** * Default constructor * @param editionComponent the {@link IPropertiesEditionComponent} that manage this part * */ public 
EnricherPropertiesEditionPartImpl(IPropertiesEditionComponent editionComponent) { super(editionComponent); } /** * {@inheritDoc} * * @see org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart# * createFigure(org.eclipse.swt.widgets.Composite) * */ public Composite createFigure(final Composite parent) { view = new Composite(parent, SWT.NONE); GridLayout layout = new GridLayout(); layout.numColumns = 3; view.setLayout(layout); createControls(view); return view; } /** * {@inheritDoc} * * @see org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart# * createControls(org.eclipse.swt.widgets.Composite) * */ public void createControls(Composite view) { CompositionSequence enricherStep = new BindingCompositionSequence(propertiesEditionComponent); CompositionStep propertiesStep = enricherStep.addStep(EipViewsRepository.Enricher.Properties.class); propertiesStep.addStep(EipViewsRepository.Enricher.Properties.name); propertiesStep.addStep(EipViewsRepository.Enricher.Properties.toChannels); propertiesStep.addStep(EipViewsRepository.Enricher.Properties.fromChannels); propertiesStep.addStep(EipViewsRepository.Enricher.Properties.ownedServiceInvocations); propertiesStep.addStep(EipViewsRepository.Enricher.Properties.part); composer = new PartComposer(enricherStep) { @Override public Composite addToPart(Composite parent, Object key) { if (key == EipViewsRepository.Enricher.Properties.class) { return createPropertiesGroup(parent); } if (key == EipViewsRepository.Enricher.Properties.name) { return createNameText(parent); } if (key == EipViewsRepository.Enricher.Properties.toChannels) { return createToChannelsAdvancedReferencesTable(parent); } if (key == EipViewsRepository.Enricher.Properties.fromChannels) { return createFromChannelsAdvancedReferencesTable(parent); } if (key == EipViewsRepository.Enricher.Properties.ownedServiceInvocations) { return createOwnedServiceInvocationsAdvancedTableComposition(parent); } if (key == 
EipViewsRepository.Enricher.Properties.part) { return createPartEMFComboViewer(parent); } return parent; } }; composer.compose(view); } /** * */ protected Composite createPropertiesGroup(Composite parent) { Group propertiesGroup = new Group(parent, SWT.NONE); propertiesGroup.setText(EipMessages.EnricherPropertiesEditionPart_PropertiesGroupLabel); GridData propertiesGroupData = new GridData(GridData.FILL_HORIZONTAL); propertiesGroupData.horizontalSpan = 3; propertiesGroup.setLayoutData(propertiesGroupData); GridLayout propertiesGroupLayout = new GridLayout(); propertiesGroupLayout.numColumns = 3; propertiesGroup.setLayout(propertiesGroupLayout); return propertiesGroup; } protected Composite createNameText(Composite parent) { createDescription(parent, EipViewsRepository.Enricher.Properties.name, EipMessages.EnricherPropertiesEditionPart_NameLabel); name = SWTUtils.createScrollableText(parent, SWT.BORDER); GridData nameData = new GridData(GridData.FILL_HORIZONTAL); name.setLayoutData(nameData); name.addFocusListener(new FocusAdapter() { /** * {@inheritDoc} * * @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent) * */ @Override @SuppressWarnings("synthetic-access") public void focusLost(FocusEvent e) { if (propertiesEditionComponent != null) propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EnricherPropertiesEditionPartImpl.this, EipViewsRepository.Enricher.Properties.name, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, name.getText())); } }); name.addKeyListener(new KeyAdapter() { /** * {@inheritDoc} * * @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent) * */ @Override @SuppressWarnings("synthetic-access") public void keyPressed(KeyEvent e) { if (e.character == SWT.CR) { if (propertiesEditionComponent != null) propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EnricherPropertiesEditionPartImpl.this, 
EipViewsRepository.Enricher.Properties.name, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, name.getText())); } } }); EditingUtils.setID(name, EipViewsRepository.Enricher.Properties.name); EditingUtils.setEEFtype(name, "eef::Text"); //$NON-NLS-1$ SWTUtils.createHelpButton(parent, propertiesEditionComponent.getHelpContent(EipViewsRepository.Enricher.Properties.name, EipViewsRepository.SWT_KIND), null); //$NON-NLS-1$ // Start of user code for createNameText // End of user code return parent; } /** * */ protected Composite createToChannelsAdvancedReferencesTable(Composite parent) { String label = getDescription(EipViewsRepository.Enricher.Properties.toChannels, EipMessages.EnricherPropertiesEditionPart_ToChannelsLabel); this.toChannels = new ReferencesTable(label, new ReferencesTableListener() { public void handleAdd() { addToChannels(); } public void handleEdit(EObject element) { editToChannels(element); } public void handleMove(EObject element, int oldIndex, int newIndex) { moveToChannels(element, oldIndex, newIndex); } public void handleRemove(EObject element) { removeFromToChannels(element); } public void navigateTo(EObject element) { } }); this.toChannels.setHelpText(propertiesEditionComponent.getHelpContent(EipViewsRepository.Enricher.Properties.toChannels, EipViewsRepository.SWT_KIND)); this.toChannels.createControls(parent); this.toChannels.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { if (e.item != null && e.item.getData() instanceof EObject) { propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EnricherPropertiesEditionPartImpl.this, EipViewsRepository.Enricher.Properties.toChannels, PropertiesEditionEvent.CHANGE, PropertiesEditionEvent.SELECTION_CHANGED, null, e.item.getData())); } } }); GridData toChannelsData = new GridData(GridData.FILL_HORIZONTAL); toChannelsData.horizontalSpan = 3; this.toChannels.setLayoutData(toChannelsData); this.toChannels.disableMove(); 
toChannels.setID(EipViewsRepository.Enricher.Properties.toChannels); toChannels.setEEFType("eef::AdvancedReferencesTable"); //$NON-NLS-1$ return parent; } /** * */ protected void addToChannels() { TabElementTreeSelectionDialog dialog = new TabElementTreeSelectionDialog(toChannels.getInput(), toChannelsFilters, toChannelsBusinessFilters, "toChannels", propertiesEditionComponent.getEditingContext().getAdapterFactory(), current.eResource()) { @Override public void process(IStructuredSelection selection) { for (Iterator<?> iter = selection.iterator(); iter.hasNext();) { EObject elem = (EObject) iter.next(); propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EnricherPropertiesEditionPartImpl.this, EipViewsRepository.Enricher.Properties.toChannels, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.ADD, null, elem)); } toChannels.refresh(); } }; dialog.open(); } /** * */ protected void moveToChannels(EObject element, int oldIndex, int newIndex) { propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EnricherPropertiesEditionPartImpl.this, EipViewsRepository.Enricher.Properties.toChannels, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.MOVE, element, newIndex)); toChannels.refresh(); } /** * */ protected void removeFromToChannels(EObject element) { propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EnricherPropertiesEditionPartImpl.this, EipViewsRepository.Enricher.Properties.toChannels, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.REMOVE, null, element)); toChannels.refresh(); } /** * */ protected void editToChannels(EObject element) { EObjectPropertiesEditionContext context = new EObjectPropertiesEditionContext(propertiesEditionComponent.getEditingContext(), propertiesEditionComponent, element, adapterFactory); PropertiesEditingProvider provider = (PropertiesEditingProvider)adapterFactory.adapt(element, PropertiesEditingProvider.class); if (provider != null) { 
PropertiesEditingPolicy policy = provider.getPolicy(context); if (policy != null) { policy.execute(); toChannels.refresh(); } } } /** * */ protected Composite createFromChannelsAdvancedReferencesTable(Composite parent) { String label = getDescription(EipViewsRepository.Enricher.Properties.fromChannels, EipMessages.EnricherPropertiesEditionPart_FromChannelsLabel); this.fromChannels = new ReferencesTable(label, new ReferencesTableListener() { public void handleAdd() { addFromChannels(); } public void handleEdit(EObject element) { editFromChannels(element); } public void handleMove(EObject element, int oldIndex, int newIndex) { moveFromChannels(element, oldIndex, newIndex); } public void handleRemove(EObject element) { removeFromFromChannels(element); } public void navigateTo(EObject element) { } }); this.fromChannels.setHelpText(propertiesEditionComponent.getHelpContent(EipViewsRepository.Enricher.Properties.fromChannels, EipViewsRepository.SWT_KIND)); this.fromChannels.createControls(parent); this.fromChannels.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { if (e.item != null && e.item.getData() instanceof EObject) { propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EnricherPropertiesEditionPartImpl.this, EipViewsRepository.Enricher.Properties.fromChannels, PropertiesEditionEvent.CHANGE, PropertiesEditionEvent.SELECTION_CHANGED, null, e.item.getData())); } } }); GridData fromChannelsData = new GridData(GridData.FILL_HORIZONTAL); fromChannelsData.horizontalSpan = 3; this.fromChannels.setLayoutData(fromChannelsData); this.fromChannels.disableMove(); fromChannels.setID(EipViewsRepository.Enricher.Properties.fromChannels); fromChannels.setEEFType("eef::AdvancedReferencesTable"); //$NON-NLS-1$ return parent; } /** * */ protected void addFromChannels() { TabElementTreeSelectionDialog dialog = new TabElementTreeSelectionDialog(fromChannels.getInput(), fromChannelsFilters, fromChannelsBusinessFilters, 
"fromChannels", propertiesEditionComponent.getEditingContext().getAdapterFactory(), current.eResource()) { @Override public void process(IStructuredSelection selection) { for (Iterator<?> iter = selection.iterator(); iter.hasNext();) { EObject elem = (EObject) iter.next(); propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EnricherPropertiesEditionPartImpl.this, EipViewsRepository.Enricher.Properties.fromChannels, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.ADD, null, elem)); } fromChannels.refresh(); } }; dialog.open(); } /** * */ protected void moveFromChannels(EObject element, int oldIndex, int newIndex) { propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EnricherPropertiesEditionPartImpl.this, EipViewsRepository.Enricher.Properties.fromChannels, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.MOVE, element, newIndex)); fromChannels.refresh(); } /** * */ protected void removeFromFromChannels(EObject element) { propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EnricherPropertiesEditionPartImpl.this, EipViewsRepository.Enricher.Properties.fromChannels, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.REMOVE, null, element)); fromChannels.refresh(); } /** * */ protected void editFromChannels(EObject element) { EObjectPropertiesEditionContext context = new EObjectPropertiesEditionContext(propertiesEditionComponent.getEditingContext(), propertiesEditionComponent, element, adapterFactory); PropertiesEditingProvider provider = (PropertiesEditingProvider)adapterFactory.adapt(element, PropertiesEditingProvider.class); if (provider != null) { PropertiesEditingPolicy policy = provider.getPolicy(context); if (policy != null) { policy.execute(); fromChannels.refresh(); } } } /** * @param container * */ protected Composite createOwnedServiceInvocationsAdvancedTableComposition(Composite parent) { this.ownedServiceInvocations = new 
ReferencesTable(getDescription(EipViewsRepository.Enricher.Properties.ownedServiceInvocations, EipMessages.EnricherPropertiesEditionPart_OwnedServiceInvocationsLabel), new ReferencesTableListener() { public void handleAdd() { propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EnricherPropertiesEditionPartImpl.this, EipViewsRepository.Enricher.Properties.ownedServiceInvocations, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.ADD, null, null)); ownedServiceInvocations.refresh(); } public void handleEdit(EObject element) { propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EnricherPropertiesEditionPartImpl.this, EipViewsRepository.Enricher.Properties.ownedServiceInvocations, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.EDIT, null, element)); ownedServiceInvocations.refresh(); } public void handleMove(EObject element, int oldIndex, int newIndex) { propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EnricherPropertiesEditionPartImpl.this, EipViewsRepository.Enricher.Properties.ownedServiceInvocations, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.MOVE, element, newIndex)); ownedServiceInvocations.refresh(); } public void handleRemove(EObject element) { propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EnricherPropertiesEditionPartImpl.this, EipViewsRepository.Enricher.Properties.ownedServiceInvocations, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.REMOVE, null, element)); ownedServiceInvocations.refresh(); } public void navigateTo(EObject element) { } }); for (ViewerFilter filter : this.ownedServiceInvocationsFilters) { this.ownedServiceInvocations.addFilter(filter); } this.ownedServiceInvocations.setHelpText(propertiesEditionComponent.getHelpContent(EipViewsRepository.Enricher.Properties.ownedServiceInvocations, EipViewsRepository.SWT_KIND)); this.ownedServiceInvocations.createControls(parent); 
this.ownedServiceInvocations.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { if (e.item != null && e.item.getData() instanceof EObject) { propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EnricherPropertiesEditionPartImpl.this, EipViewsRepository.Enricher.Properties.ownedServiceInvocations, PropertiesEditionEvent.CHANGE, PropertiesEditionEvent.SELECTION_CHANGED, null, e.item.getData())); } } }); GridData ownedServiceInvocationsData = new GridData(GridData.FILL_HORIZONTAL); ownedServiceInvocationsData.horizontalSpan = 3; this.ownedServiceInvocations.setLayoutData(ownedServiceInvocationsData); this.ownedServiceInvocations.setLowerBound(0); this.ownedServiceInvocations.setUpperBound(-1); ownedServiceInvocations.setID(EipViewsRepository.Enricher.Properties.ownedServiceInvocations); ownedServiceInvocations.setEEFType("eef::AdvancedTableComposition"); //$NON-NLS-1$ // Start of user code for createOwnedServiceInvocationsAdvancedTableComposition // End of user code return parent; } protected Composite createPartEMFComboViewer(Composite parent) { createDescription(parent, EipViewsRepository.Enricher.Properties.part, EipMessages.EnricherPropertiesEditionPart_PartLabel); part = new EMFComboViewer(parent); part.setContentProvider(new ArrayContentProvider()); part.setLabelProvider(new AdapterFactoryLabelProvider(EEFRuntimePlugin.getDefault().getAdapterFactory())); GridData partData = new GridData(GridData.FILL_HORIZONTAL); part.getCombo().setLayoutData(partData); part.addSelectionChangedListener(new ISelectionChangedListener() { /** * {@inheritDoc} * * @see org.eclipse.jface.viewers.ISelectionChangedListener#selectionChanged(org.eclipse.jface.viewers.SelectionChangedEvent) * */ public void selectionChanged(SelectionChangedEvent event) { if (propertiesEditionComponent != null) propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EnricherPropertiesEditionPartImpl.this, 
EipViewsRepository.Enricher.Properties.part, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, getPart())); } }); part.setID(EipViewsRepository.Enricher.Properties.part); SWTUtils.createHelpButton(parent, propertiesEditionComponent.getHelpContent(EipViewsRepository.Enricher.Properties.part, EipViewsRepository.SWT_KIND), null); //$NON-NLS-1$ // Start of user code for createPartEMFComboViewer // End of user code return parent; } /** * {@inheritDoc} * * @see org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionListener#firePropertiesChanged(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent) * */ public void firePropertiesChanged(IPropertiesEditionEvent event) { // Start of user code for tab synchronization // End of user code } /** * {@inheritDoc} * * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#getName() * */ public String getName() { return name.getText(); } /** * {@inheritDoc} * * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#setName(String newValue) * */ public void setName(String newValue) { if (newValue != null) { name.setText(newValue); } else { name.setText(""); //$NON-NLS-1$ } boolean eefElementEditorReadOnlyState = isReadOnly(EipViewsRepository.Enricher.Properties.name); if (eefElementEditorReadOnlyState && name.isEnabled()) { name.setEnabled(false); name.setToolTipText(EipMessages.Enricher_ReadOnly); } else if (!eefElementEditorReadOnlyState && !name.isEnabled()) { name.setEnabled(true); } } /** * {@inheritDoc} * * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#initToChannels(org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings) */ public void initToChannels(ReferencesTableSettings settings) { if (current.eResource() != null && current.eResource().getResourceSet() != null) this.resourceSet = current.eResource().getResourceSet(); ReferencesTableContentProvider contentProvider = new ReferencesTableContentProvider(); 
toChannels.setContentProvider(contentProvider); toChannels.setInput(settings); boolean eefElementEditorReadOnlyState = isReadOnly(EipViewsRepository.Enricher.Properties.toChannels); if (eefElementEditorReadOnlyState && toChannels.getTable().isEnabled()) { toChannels.setEnabled(false); toChannels.setToolTipText(EipMessages.Enricher_ReadOnly); } else if (!eefElementEditorReadOnlyState && !toChannels.getTable().isEnabled()) { toChannels.setEnabled(true); } } /** * {@inheritDoc} * * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#updateToChannels() * */ public void updateToChannels() { toChannels.refresh(); } /** * {@inheritDoc} * * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#addFilterToChannels(ViewerFilter filter) * */ public void addFilterToToChannels(ViewerFilter filter) { toChannelsFilters.add(filter); } /** * {@inheritDoc} * * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#addBusinessFilterToChannels(ViewerFilter filter) * */ public void addBusinessFilterToToChannels(ViewerFilter filter) { toChannelsBusinessFilters.add(filter); } /** * {@inheritDoc} * * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#isContainedInToChannelsTable(EObject element) * */ public boolean isContainedInToChannelsTable(EObject element) { return ((ReferencesTableSettings)toChannels.getInput()).contains(element); } /** * {@inheritDoc} * * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#initFromChannels(org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings) */ public void initFromChannels(ReferencesTableSettings settings) { if (current.eResource() != null && current.eResource().getResourceSet() != null) this.resourceSet = current.eResource().getResourceSet(); ReferencesTableContentProvider contentProvider = new ReferencesTableContentProvider(); fromChannels.setContentProvider(contentProvider); fromChannels.setInput(settings); boolean 
// --- Tail of an EEF-generated properties edition part for the 'Enricher' EIP element. ---
// NOTE(review): this is EMF/EEF generated code; edits outside the "user code" markers are
// overwritten on regeneration. The statement below continues a method whose start lies
// before this chunk (presumably initFromChannels(...) — confirm against the full file).
eefElementEditorReadOnlyState = isReadOnly(EipViewsRepository.Enricher.Properties.fromChannels);
// Mirror the EEF read-only flag onto the widget: disable + tooltip when read-only,
// re-enable when writable again.
if (eefElementEditorReadOnlyState && fromChannels.getTable().isEnabled()) {
  fromChannels.setEnabled(false);
  fromChannels.setToolTipText(EipMessages.Enricher_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !fromChannels.getTable().isEnabled()) {
  fromChannels.setEnabled(true);
}
}

/**
 * {@inheritDoc}
 * Refreshes the fromChannels references table from its current input.
 * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#updateFromChannels()
 */
public void updateFromChannels() {
  fromChannels.refresh();
}

/**
 * {@inheritDoc}
 * Registers a UI filter for the fromChannels table.
 * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#addFilterFromChannels(ViewerFilter filter)
 */
public void addFilterToFromChannels(ViewerFilter filter) {
  fromChannelsFilters.add(filter);
}

/**
 * {@inheritDoc}
 * Registers a business filter for the fromChannels table.
 * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#addBusinessFilterFromChannels(ViewerFilter filter)
 */
public void addBusinessFilterToFromChannels(ViewerFilter filter) {
  fromChannelsBusinessFilters.add(filter);
}

/**
 * {@inheritDoc}
 * Returns true when the given element is present in the fromChannels table input.
 * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#isContainedInFromChannelsTable(EObject element)
 */
public boolean isContainedInFromChannelsTable(EObject element) {
  return ((ReferencesTableSettings)fromChannels.getInput()).contains(element);
}

/**
 * {@inheritDoc}
 * Initializes the ownedServiceInvocations table: binds content provider, input settings,
 * and applies the EEF read-only state.
 * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#initOwnedServiceInvocations(EObject current, EReference containingFeature, EReference feature)
 */
public void initOwnedServiceInvocations(ReferencesTableSettings settings) {
  // Cache the resource set of the edited object when available (used for element resolution).
  if (current.eResource() != null && current.eResource().getResourceSet() != null)
    this.resourceSet = current.eResource().getResourceSet();
  ReferencesTableContentProvider contentProvider = new ReferencesTableContentProvider();
  ownedServiceInvocations.setContentProvider(contentProvider);
  ownedServiceInvocations.setInput(settings);
  boolean eefElementEditorReadOnlyState = isReadOnly(EipViewsRepository.Enricher.Properties.ownedServiceInvocations);
  if (eefElementEditorReadOnlyState && ownedServiceInvocations.isEnabled()) {
    ownedServiceInvocations.setEnabled(false);
    ownedServiceInvocations.setToolTipText(EipMessages.Enricher_ReadOnly);
  } else if (!eefElementEditorReadOnlyState && !ownedServiceInvocations.isEnabled()) {
    ownedServiceInvocations.setEnabled(true);
  }
}

/**
 * {@inheritDoc}
 * Refreshes the ownedServiceInvocations table from its current input.
 * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#updateOwnedServiceInvocations()
 */
public void updateOwnedServiceInvocations() {
  ownedServiceInvocations.refresh();
}

/**
 * {@inheritDoc}
 * Registers a UI filter; unlike the fromChannels variant it is also pushed to the
 * live viewer immediately when one exists.
 * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#addFilterOwnedServiceInvocations(ViewerFilter filter)
 */
public void addFilterToOwnedServiceInvocations(ViewerFilter filter) {
  ownedServiceInvocationsFilters.add(filter);
  if (this.ownedServiceInvocations != null) {
    this.ownedServiceInvocations.addFilter(filter);
  }
}

/**
 * {@inheritDoc}
 * Registers a business filter for the ownedServiceInvocations table.
 * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#addBusinessFilterOwnedServiceInvocations(ViewerFilter filter)
 */
public void addBusinessFilterToOwnedServiceInvocations(ViewerFilter filter) {
  ownedServiceInvocationsBusinessFilters.add(filter);
}

/**
 * {@inheritDoc}
 * Returns true when the given element is present in the ownedServiceInvocations table input.
 * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#isContainedInOwnedServiceInvocationsTable(EObject element)
 */
public boolean isContainedInOwnedServiceInvocationsTable(EObject element) {
  return ((ReferencesTableSettings)ownedServiceInvocations.getInput()).contains(element);
}

/**
 * {@inheritDoc}
 * Returns the currently selected 'part' enumeration literal from the combo viewer.
 * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#getPart()
 */
public Enumerator getPart() {
  Enumerator selection = (Enumerator) ((StructuredSelection) part.getSelection()).getFirstElement();
  return selection;
}

/**
 * {@inheritDoc}
 * Initializes the 'part' viewer with its input and current value, then applies the
 * EEF read-only state.
 * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#initPart(Object input, Enumerator current)
 */
public void initPart(Object input, Enumerator current) {
  part.setInput(input);
  part.modelUpdating(new StructuredSelection(current));
  boolean eefElementEditorReadOnlyState = isReadOnly(EipViewsRepository.Enricher.Properties.part);
  if (eefElementEditorReadOnlyState && part.isEnabled()) {
    part.setEnabled(false);
    part.setToolTipText(EipMessages.Enricher_ReadOnly);
  } else if (!eefElementEditorReadOnlyState && !part.isEnabled()) {
    part.setEnabled(true);
  }
}

/**
 * {@inheritDoc}
 * Pushes a new 'part' value into the viewer and re-applies the read-only state.
 * @see com.github.lbroudoux.dsl.eip.parts.EnricherPropertiesEditionPart#setPart(Enumerator newValue)
 */
public void setPart(Enumerator newValue) {
  part.modelUpdating(new StructuredSelection(newValue));
  boolean eefElementEditorReadOnlyState = isReadOnly(EipViewsRepository.Enricher.Properties.part);
  if (eefElementEditorReadOnlyState && part.isEnabled()) {
    part.setEnabled(false);
    part.setToolTipText(EipMessages.Enricher_ReadOnly);
  } else if (!eefElementEditorReadOnlyState && !part.isEnabled()) {
    part.setEnabled(true);
  }
}

/**
 * {@inheritDoc}
 * Title of this properties edition part, from the generated messages bundle.
 * @see org.eclipse.emf.eef.runtime.api.parts.IPropertiesEditionPart#getTitle()
 */
public String getTitle() {
  return EipMessages.Enricher_Part_Title;
}

// Start of user code additional methods

// End of user code

}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.diff.tools.fragmented; import com.intellij.diff.DiffContext; import com.intellij.diff.actions.BufferedLineIterator; import com.intellij.diff.actions.NavigationContextChecker; import com.intellij.diff.actions.impl.OpenInEditorWithMouseAction; import com.intellij.diff.actions.impl.SetEditorSettingsAction; import com.intellij.diff.comparison.DiffTooBigException; import com.intellij.diff.contents.DocumentContent; import com.intellij.diff.fragments.LineFragment; import com.intellij.diff.requests.ContentDiffRequest; import com.intellij.diff.requests.DiffRequest; import com.intellij.diff.tools.util.*; import com.intellij.diff.tools.util.base.*; import com.intellij.diff.tools.util.side.TwosideTextDiffViewer; import com.intellij.diff.util.*; import com.intellij.diff.util.DiffUserDataKeysEx.ScrollToPolicy; import com.intellij.icons.AllIcons; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.undo.UndoManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.diff.LineTokenizer; import com.intellij.openapi.editor.*; import com.intellij.openapi.editor.actionSystem.EditorActionManager; import com.intellij.openapi.editor.actionSystem.ReadonlyFragmentModificationHandler; import com.intellij.openapi.editor.colors.EditorColors; import 
com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.UserDataHolder;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import gnu.trove.TIntFunction;
import org.jetbrains.annotations.*;

import javax.swing.*;
import java.util.*;

import static com.intellij.diff.util.DiffUtil.getLineCount;

/**
 * Diff viewer that renders a two-sided diff in a single ("unified") editor.
 * Both contents are merged into one synthetic Document; edits typed into that
 * document are replayed onto the "master side" document (see myMasterSide).
 */
public class UnifiedDiffViewer extends ListenerDiffViewerBase {
  public static final Logger LOG = Logger.getInstance(UnifiedDiffViewer.class);

  @NotNull protected final EditorEx myEditor;
  // The synthetic one-side document shown in myEditor (built from both contents).
  @NotNull protected final Document myDocument;
  @NotNull private final UnifiedDiffPanel myPanel;

  @NotNull private final SetEditorSettingsAction myEditorSettingsAction;

  @NotNull private final PrevNextDifferenceIterable myPrevNextDifferenceIterable;
  @NotNull private final MyStatusPanel myStatusPanel;

  @NotNull private final MyInitialScrollHelper myInitialScrollHelper = new MyInitialScrollHelper();
  @NotNull private final MyFoldingModel myFoldingModel;

  // The side that receives typed edits; defaults to RIGHT, adjusted in the
  // constructor / processContextHints() based on editability and context hints.
  @NotNull protected Side myMasterSide = Side.RIGHT;

  // Per-rediff state (changes, guarded blocks, line convertors); null until first successful rediff.
  @Nullable private ChangedBlockData myChangedBlockData;

  private final boolean[] myForceReadOnlyFlags; // [left, right] forced read-only from request/context
  private boolean myReadOnlyLockSet = false;

  // Re-entrancy guards between the oneside document listener and twoside updates.
  private boolean myDuringOnesideDocumentModification;
  private boolean myDuringTwosideDocumentModification;

  private boolean myStateIsOutOfDate; // whether something was changed since last rediff
  private boolean mySuppressEditorTyping; // our state is inconsistent. No typing can be handled correctly

  public UnifiedDiffViewer(@NotNull DiffContext context, @NotNull DiffRequest request) {
    super(context, (ContentDiffRequest)request);

    myPrevNextDifferenceIterable = new MyPrevNextDifferenceIterable();
    myStatusPanel = new MyStatusPanel();
    myForceReadOnlyFlags = TextDiffViewerUtil.checkForceReadOnly(myContext, myRequest);

    // Prefer the only editable side as master; if both or neither are editable,
    // keep the RIGHT default.
    boolean leftEditable = isEditable(Side.LEFT, false);
    boolean rightEditable = isEditable(Side.RIGHT, false);
    if (leftEditable && !rightEditable) myMasterSide = Side.LEFT;
    if (!leftEditable && rightEditable) myMasterSide = Side.RIGHT;

    myDocument = EditorFactory.getInstance().createDocument("");
    myEditor = DiffUtil.createEditor(myDocument, myProject, true, true);

    // The same editor is passed twice: both titles decorate the single unified editor.
    List<JComponent> titles = DiffUtil.createTextTitles(myRequest, ContainerUtil.list(myEditor, myEditor));
    UnifiedContentPanel contentPanel = new UnifiedContentPanel(titles, myEditor);

    myPanel = new UnifiedDiffPanel(myProject, contentPanel, this, myContext);

    myFoldingModel = new MyFoldingModel(myEditor, this);

    myEditorSettingsAction = new SetEditorSettingsAction(getTextSettings(), getEditors());
    myEditorSettingsAction.applyDefaults();

    new MyOpenInEditorWithMouseAction().register(getEditors());

    TextDiffViewerUtil.checkDifferentDocuments(myRequest);

    // Shortcut variants (shortcut == true) are registered on the panel so they work
    // regardless of focus; menu variants are created in createEditorPopupActions().
    DiffUtil.registerAction(new ReplaceSelectedChangesAction(Side.LEFT, true), myPanel);
    DiffUtil.registerAction(new AppendSelectedChangesAction(Side.LEFT, true), myPanel);
    DiffUtil.registerAction(new ReplaceSelectedChangesAction(Side.RIGHT, true), myPanel);
    DiffUtil.registerAction(new AppendSelectedChangesAction(Side.RIGHT, true), myPanel);
  }

  @Override
  @CalledInAwt
  protected void onInit() {
    super.onInit();
    installEditorListeners();
    installTypingSupport();
    myPanel.setLoadingContent(); // We need loading panel only for initial rediff()
    myPanel.setPersistentNotifications(DiffUtil.getCustomNotifications(myContext, myRequest));
  }

  @Override
  @CalledInAwt
  protected void onDispose() {
    super.onDispose();
    // The editor was created by this viewer in the constructor, so it must be released here.
    EditorFactory.getInstance().releaseEditor(myEditor);
  }

  @Override
  @CalledInAwt
  protected void processContextHints() {
    super.processContextHints();
    // A MASTER_SIDE hint overrides the editability-based choice made in the constructor.
    Side side = DiffUtil.getUserData(myRequest, myContext, DiffUserDataKeys.MASTER_SIDE);
    if (side != null) myMasterSide = side;

    myInitialScrollHelper.processContext(myRequest);
  }

  @Override
  @CalledInAwt
  protected void updateContextHints() {
    super.updateContextHints();
    myInitialScrollHelper.updateContext(myRequest);
    myFoldingModel.updateContext(myRequest, getFoldingModelSettings());
  }

  // Editor is a viewer (non-typable) while our state is inconsistent or the master side is locked.
  @CalledInAwt
  protected void updateEditorCanBeTyped() {
    myEditor.setViewer(mySuppressEditorTyping || !isEditable(myMasterSide, true));
  }

  private void installTypingSupport() {
    if (!isEditable(myMasterSide, false)) return;

    updateEditorCanBeTyped();
    myEditor.getColorsScheme().setColor(EditorColors.READONLY_FRAGMENT_BACKGROUND_COLOR, null); // guarded blocks
    EditorActionManager.getInstance().setReadonlyFragmentModificationHandler(myDocument, new MyReadonlyFragmentModificationHandler());
    myDocument.putUserData(UndoManager.ORIGINAL_DOCUMENT, getDocument(myMasterSide)); // use undo of master document

    myDocument.addDocumentListener(new MyOnesideDocumentListener());
  }

  @CalledInAwt
  @NotNull
  public List<AnAction> createToolbarActions() {
    List<AnAction> group = new ArrayList<AnAction>();

    // TODO: allow to choose myMasterSide
    group.add(new MyIgnorePolicySettingAction());
    group.add(new MyHighlightPolicySettingAction());
    group.add(new MyToggleExpandByDefaultAction());
    group.add(new MyReadOnlyLockAction());
    group.add(myEditorSettingsAction);

    group.add(Separator.getInstance());
    group.addAll(super.createToolbarActions());

    return group;
  }

  @CalledInAwt
  @NotNull
  public List<AnAction> createPopupActions() {
    List<AnAction> group = new ArrayList<AnAction>();

    group.add(Separator.getInstance());
    group.add(new MyIgnorePolicySettingAction().getPopupGroup());
    group.add(Separator.getInstance());
    group.add(new MyHighlightPolicySettingAction().getPopupGroup());
// Continuation of createPopupActions() started earlier in the file.
    group.add(Separator.getInstance());
    group.add(new MyToggleExpandByDefaultAction());
    group.add(Separator.getInstance());
    group.addAll(super.createPopupActions());

    return group;
  }

  @NotNull
  protected List<AnAction> createEditorPopupActions() {
    List<AnAction> group = new ArrayList<AnAction>();

    // Menu (non-shortcut) variants; the shortcut variants are registered in the constructor.
    group.add(new ReplaceSelectedChangesAction(Side.LEFT, false));
    group.add(new AppendSelectedChangesAction(Side.LEFT, false));
    group.add(new ReplaceSelectedChangesAction(Side.RIGHT, false));
    group.add(new AppendSelectedChangesAction(Side.RIGHT, false));
    group.add(new RevertSelectedChangesAction(Side.LEFT));
    group.add(new RevertSelectedChangesAction(Side.RIGHT));

    group.add(Separator.getInstance());
    group.addAll(TextDiffViewerUtil.createEditorPopupActions());

    return group;
  }

  @CalledInAwt
  protected void installEditorListeners() {
    new TextDiffViewerUtil.EditorActionsPopup(createEditorPopupActions()).install(getEditors());
  }

  //
  // Diff
  //

  @Override
  @CalledInAwt
  protected void onSlowRediff() {
    super.onSlowRediff();
    myStatusPanel.setBusy(true);
  }

  /**
   * Computes the diff on a background thread and returns a Runnable that applies
   * the result on the EDT. Heavy work (compare, unified text build, highlighters)
   * happens here; only the returned Runnable touches the editor.
   */
  @Override
  @NotNull
  protected Runnable performRediff(@NotNull final ProgressIndicator indicator) {
    try {
      indicator.checkCanceled();

      final Document document1 = getContent1().getDocument();
      final Document document2 = getContent2().getDocument();

      // Snapshot both texts under read action so the compare runs on consistent data.
      final CharSequence[] texts = ApplicationManager.getApplication().runReadAction(new Computable<CharSequence[]>() {
        @Override
        public CharSequence[] compute() {
          return new CharSequence[]{document1.getImmutableCharSequence(), document2.getImmutableCharSequence()};
        }
      });

      final boolean innerFragments = getDiffConfig().innerFragments;
      final List<LineFragment> fragments = DiffUtil.compare(texts[0], texts[1], getDiffConfig(), indicator);

      final DocumentContent content1 = getContent1();
      final DocumentContent content2 = getContent2();

      indicator.checkCanceled();
      TwosideDocumentData data = ApplicationManager.getApplication().runReadAction(new Computable<TwosideDocumentData>() {
        @Override
        public TwosideDocumentData compute() {
          indicator.checkCanceled();
          // Build the merged "unified" text plus its block/line mapping structures.
          UnifiedFragmentBuilder builder = new UnifiedFragmentBuilder(fragments, document1, document2, myMasterSide);
          builder.exec();

          indicator.checkCanceled();

          EditorHighlighter highlighter = buildHighlighter(myProject, content1, content2, texts[0], texts[1], builder.getRanges(), builder.getText().length());

          UnifiedEditorRangeHighlighter rangeHighlighter = new UnifiedEditorRangeHighlighter(myProject, document1, document2, builder.getRanges());

          return new TwosideDocumentData(builder, highlighter, rangeHighlighter);
        }
      });
      UnifiedFragmentBuilder builder = data.getBuilder();

      // Prefer the newer side's file type; fall back to the other side when absent.
      FileType fileType = content2.getContentType() == null ? content1.getContentType() : content2.getContentType();

      LineNumberConvertor convertor = builder.getConvertor();
      List<LineRange> changedLines = builder.getChangedLines();
      boolean isEqual = builder.isEqual();

      CombinedEditorData editorData = new CombinedEditorData(builder.getText(), data.getHighlighter(), data.getRangeHighlighter(), fileType, convertor.createConvertor1(), convertor.createConvertor2());

      return apply(editorData, builder.getBlocks(), convertor, changedLines, isEqual, innerFragments);
    }
    catch (DiffTooBigException e) {
      return new Runnable() {
        @Override
        public void run() {
          clearDiffPresentation();
          myPanel.setTooBigContent();
        }
      };
    }
    catch (ProcessCanceledException e) {
      // Cancellation must propagate; it is not an error condition.
      throw e;
    }
    catch (Throwable e) {
      LOG.error(e);
      return new Runnable() {
        @Override
        public void run() {
          clearDiffPresentation();
          myPanel.setErrorContent();
        }
      };
    }
  }

  // Resets notifications, busy state and per-rediff data before a result is applied.
  private void clearDiffPresentation() {
    myPanel.resetNotifications();
    myStatusPanel.setBusy(false);
    destroyChangedBlockData();
    myStateIsOutOfDate = false;
    mySuppressEditorTyping = false;
    updateEditorCanBeTyped();
  }

  // Called when our internal state can no longer map typing correctly; makes the editor a viewer.
  @CalledInAwt
  protected void markSuppressEditorTyping() {
    mySuppressEditorTyping = true;
    updateEditorCanBeTyped();
  }

  // Marks the computed diff as stale and refreshes per-change gutter actions.
  @CalledInAwt
  protected void markStateIsOutOfDate() {
    myStateIsOutOfDate = true;

    if (myChangedBlockData != null) {
      for (UnifiedDiffChange diffChange : myChangedBlockData.getDiffChanges()) {
        diffChange.updateGutterActions();
      }
    }
  }

  /**
   * Builds a combined highlighter for the unified document from both contents'
   * highlighters; returns null when neither side has one.
   */
  @Nullable
  private EditorHighlighter buildHighlighter(@Nullable Project project,
                                             @NotNull DocumentContent content1,
                                             @NotNull DocumentContent content2,
                                             @NotNull CharSequence text1,
                                             @NotNull CharSequence text2,
                                             @NotNull List<HighlightRange> ranges,
                                             int textLength) {
    EditorHighlighter highlighter1 = DiffUtil.initEditorHighlighter(project, content1, text1);
    EditorHighlighter highlighter2 = DiffUtil.initEditorHighlighter(project, content2, text2);

    if (highlighter1 == null && highlighter2 == null) return null;
    if (highlighter1 == null) highlighter1 = DiffUtil.initEmptyEditorHighlighter(text1);
    if (highlighter2 == null) highlighter2 = DiffUtil.initEmptyEditorHighlighter(text2);

    return new UnifiedEditorHighlighter(myDocument, highlighter1, highlighter2, ranges, textLength);
  }

  /**
   * Returns the EDT-side Runnable that installs a computed rediff result into the
   * editor: sets text/highlighters, creates guarded blocks, and publishes
   * myChangedBlockData.
   */
  @NotNull
  private Runnable apply(@NotNull final CombinedEditorData data,
                         @NotNull final List<ChangedBlock> blocks,
                         @NotNull final LineNumberConvertor convertor,
                         @NotNull final List<LineRange> changedLines,
                         final boolean isEqual,
                         final boolean innerFragments) {
    return new Runnable() {
      @Override
      public void run() {
        myFoldingModel.updateContext(myRequest, getFoldingModelSettings());
        clearDiffPresentation();

        if (isEqual) myPanel.addNotification(DiffNotifications.EQUAL_CONTENTS);

        // Gutter line numbers must account for folded separator lines.
        TIntFunction separatorLines = myFoldingModel.getLineNumberConvertor();
        myEditor.getGutterComponentEx().setLineNumberConvertor(mergeConverters(data.getLineConvertor1(), separatorLines), mergeConverters(data.getLineConvertor2(), separatorLines));

        ApplicationManager.getApplication().runWriteAction(new Runnable() {
          @Override
          public void run() {
            // Guard flag keeps MyOnesideDocumentListener from echoing this write back.
            myDuringOnesideDocumentModification = true;
            try {
              myDocument.setText(data.getText());
            }
            finally {
              myDuringOnesideDocumentModification = false;
            }
          }
        });

        if (data.getHighlighter() != null) myEditor.setHighlighter(data.getHighlighter());
        DiffUtil.setEditorCodeStyle(myProject, myEditor, data.getFileType());

        if
(data.getRangeHighlighter() != null) data.getRangeHighlighter().apply(myProject, myDocument);

        ArrayList<UnifiedDiffChange> diffChanges = new ArrayList<UnifiedDiffChange>(blocks.size());
        for (ChangedBlock block : blocks) {
          diffChanges.add(new UnifiedDiffChange(UnifiedDiffViewer.this, block, innerFragments));
        }

        // Guard the portions of the unified document that belong to the non-master
        // side, so typing can only touch text that maps back to the master document.
        List<RangeMarker> guarderRangeBlocks = new ArrayList<RangeMarker>();
        if (!myEditor.isViewer()) {
          for (ChangedBlock block : blocks) {
            int start = myMasterSide.select(block.getStartOffset2(), block.getStartOffset1());
            int end = myMasterSide.select(block.getEndOffset2() - 1, block.getEndOffset1() - 1);
            if (start >= end) continue;
            guarderRangeBlocks.add(createGuardedBlock(start, end));
          }
          int textLength = myDocument.getTextLength(); // there are 'fake' newline at the very end
          guarderRangeBlocks.add(createGuardedBlock(textLength, textLength));
        }

        myChangedBlockData = new ChangedBlockData(diffChanges, guarderRangeBlocks, convertor);

        myFoldingModel.install(changedLines, myRequest, getFoldingModelSettings());

        myInitialScrollHelper.onRediff();

        myStatusPanel.update();
        myPanel.setGoodContent();
      }
    };
  }

  // Greedy on both ends so insertions adjacent to the block stay guarded.
  @NotNull
  private RangeMarker createGuardedBlock(int start, int end) {
    RangeMarker block = myDocument.createGuardedBlock(start, end);
    block.setGreedyToLeft(true);
    block.setGreedyToRight(true);
    return block;
  }

  // Composes two line-number convertors: separator folding first, then side mapping.
  @Contract("!null, _ -> !null")
  private static TIntFunction mergeConverters(@NotNull final TIntFunction convertor, @NotNull final TIntFunction separatorLines) {
    return new TIntFunction() {
      @Override
      public int execute(int value) {
        return convertor.execute(separatorLines.execute(value));
      }
    };
  }

  /*
   * This convertor returns -1 if exact matching is impossible
   */
  @CalledInAwt
  public int transferLineToOnesideStrict(@NotNull Side side, int line) {
    if (myChangedBlockData == null) return -1;

    LineNumberConvertor lineConvertor = myChangedBlockData.getLineNumberConvertor();
    return side.isLeft() ? lineConvertor.convertInv1(line) : lineConvertor.convertInv2(line);
  }

  /*
   * This convertor returns -1 if exact matching is impossible
   */
  @CalledInAwt
  public int transferLineFromOnesideStrict(@NotNull Side side, int line) {
    if (myChangedBlockData == null) return -1;

    LineNumberConvertor lineConvertor = myChangedBlockData.getLineNumberConvertor();
    return side.isLeft() ? lineConvertor.convert1(line) : lineConvertor.convert2(line);
  }

  /*
   * This convertor returns 'good enough' position, even if exact matching is impossible
   */
  @CalledInAwt
  public int transferLineToOneside(@NotNull Side side, int line) {
    if (myChangedBlockData == null) return line;

    LineNumberConvertor lineConvertor = myChangedBlockData.getLineNumberConvertor();
    return side.isLeft() ? lineConvertor.convertApproximateInv1(line) : lineConvertor.convertApproximateInv2(line);
  }

  /*
   * This convertor returns 'good enough' position, even if exact matching is impossible
   *
   * Returns both sides' lines for the given unified line, plus the side whose
   * mapping was exact (falls back to myMasterSide when both or neither matched).
   */
  @CalledInAwt
  @NotNull
  public Pair<int[], Side> transferLineFromOneside(int line) {
    int[] lines = new int[2];

    if (myChangedBlockData == null) {
      lines[0] = line;
      lines[1] = line;
      return Pair.create(lines, myMasterSide);
    }

    LineNumberConvertor lineConvertor = myChangedBlockData.getLineNumberConvertor();

    Side side = myMasterSide;
    lines[0] = lineConvertor.convert1(line);
    lines[1] = lineConvertor.convert2(line);

    if (lines[0] == -1 && lines[1] == -1) {
      lines[0] = lineConvertor.convertApproximate1(line);
      lines[1] = lineConvertor.convertApproximate2(line);
    }
    else if (lines[0] == -1) {
      // Left side missing => the exact match is on the right.
      lines[0] = lineConvertor.convertApproximate1(line);
      side = Side.RIGHT;
    }
    else if (lines[1] == -1) {
      lines[1] = lineConvertor.convertApproximate2(line);
      side = Side.LEFT;
    }

    return Pair.create(lines, side);
  }

  // Tears down everything published by apply(): change highlighters, guarded blocks,
  // range highlighters, folding; resets myChangedBlockData to null.
  @CalledInAwt
  private void destroyChangedBlockData() {
    if (myChangedBlockData == null) return;

    for (UnifiedDiffChange change : myChangedBlockData.getDiffChanges()) {
      change.destroyHighlighter();
    }
    for (RangeMarker block : myChangedBlockData.getGuardedRangeBlocks()) {
      myDocument.removeGuardedBlock(block);
    }
    myChangedBlockData = null;

    UnifiedEditorRangeHighlighter.erase(myProject, myDocument);

    myFoldingModel.destroy();

    myStatusPanel.update();
  }

  //
  // Typing
  //

  /**
   * Replays edits typed into the unified document onto the master side's real
   * document, keeping change blocks and line convertors in sync.
   */
  private class MyOnesideDocumentListener extends DocumentAdapter {
    @Override
    public void beforeDocumentChange(DocumentEvent e) {
      if (myDuringOnesideDocumentModification) return;
      if (myChangedBlockData == null) {
        LOG.warn("oneside beforeDocumentChange - myChangedBlockData == null");
        return;
      }
      // TODO: modify Document guard range logic - we can handle case, when whole read-only block is modified (ex: my replacing selection).

      try {
        myDuringTwosideDocumentModification = true;

        Document twosideDocument = getDocument(myMasterSide);

        LineCol onesideStartPosition = LineCol.fromOffset(myDocument, e.getOffset());
        LineCol onesideEndPosition = LineCol.fromOffset(myDocument, e.getOffset() + e.getOldLength());

        int line1 = onesideStartPosition.line;
        int line2 = onesideEndPosition.line + 1;
        int shift = DiffUtil.countLinesShift(e);

        int twosideStartLine = transferLineFromOnesideStrict(myMasterSide, onesideStartPosition.line);
        int twosideEndLine = transferLineFromOnesideStrict(myMasterSide, onesideEndPosition.line);
        if (twosideStartLine == -1 || twosideEndLine == -1) {
          // this should never happen
          logDebugInfo(e, onesideStartPosition, onesideEndPosition, twosideStartLine, twosideEndLine);
          markSuppressEditorTyping();
          return;
        }

        int twosideStartOffset = twosideDocument.getLineStartOffset(twosideStartLine) + onesideStartPosition.column;
        int twosideEndOffset = twosideDocument.getLineStartOffset(twosideEndLine) + onesideEndPosition.column;
        twosideDocument.replaceString(twosideStartOffset, twosideEndOffset, e.getNewFragment());

        for (UnifiedDiffChange change : myChangedBlockData.getDiffChanges()) {
          change.processChange(line1, line2, shift);
        }

        LineNumberConvertor lineNumberConvertor = myChangedBlockData.getLineNumberConvertor();
        lineNumberConvertor.handleOnesideChange(line1, line2, shift, myMasterSide);
      }
finally {
        // TODO: we can avoid marking state out-of-date in some simple cases (like in SimpleDiffViewer)
        // but this will greatly increase complexity, so let's wait if it's actually required by users
        markStateIsOutOfDate();

        myFoldingModel.onDocumentChanged(e);
        scheduleRediff();

        myDuringTwosideDocumentModification = false;
      }
    }

    // Dumps mapping state when the "should never happen" strict-transfer failure occurs.
    private void logDebugInfo(DocumentEvent e,
                              LineCol onesideStartPosition, LineCol onesideEndPosition,
                              int twosideStartLine, int twosideEndLine) {
      StringBuilder info = new StringBuilder();
      Document document1 = getDocument(Side.LEFT);
      Document document2 = getDocument(Side.RIGHT);
      // NOTE(review): the header append below has no '\n' terminator, so the first
      // data line is concatenated to it in the log output — presumably cosmetic only.
      info.append("==== UnifiedDiffViewer Debug Info ====");
      info.append("myMasterSide - ").append(myMasterSide).append('\n');
      info.append("myLeftDocument.length() - ").append(document1.getTextLength()).append('\n');
      info.append("myRightDocument.length() - ").append(document2.getTextLength()).append('\n');
      info.append("myDocument.length() - ").append(myDocument.getTextLength()).append('\n');
      info.append("e.getOffset() - ").append(e.getOffset()).append('\n');
      info.append("e.getNewLength() - ").append(e.getNewLength()).append('\n');
      info.append("e.getOldLength() - ").append(e.getOldLength()).append('\n');
      info.append("onesideStartPosition - ").append(onesideStartPosition).append('\n');
      info.append("onesideEndPosition - ").append(onesideEndPosition).append('\n');
      info.append("twosideStartLine - ").append(twosideStartLine).append('\n');
      info.append("twosideEndLine - ").append(twosideEndLine).append('\n');

      Pair<int[], Side> pair1 = transferLineFromOneside(onesideStartPosition.line);
      Pair<int[], Side> pair2 = transferLineFromOneside(onesideEndPosition.line);
      info.append("non-strict transferStartLine - ").append(pair1.first[0]).append("-").append(pair1.first[1])
          .append(":").append(pair1.second).append('\n');
      info.append("non-strict transferEndLine - ").append(pair2.first[0]).append("-").append(pair2.first[1])
          .append(":").append(pair2.second).append('\n');
      info.append("---- UnifiedDiffViewer Debug Info ----");

      LOG.warn(info.toString());
    }
  }

  // External (non-typing) change to one of the underlying documents: our unified
  // state is now stale, so block typing until the next rediff completes.
  @Override
  protected void onDocumentChange(@NotNull DocumentEvent e) {
    if (myDuringTwosideDocumentModification) return;

    markStateIsOutOfDate();
    markSuppressEditorTyping();

    myFoldingModel.onDocumentChanged(e);
    scheduleRediff();
  }

  //
  // Modification operations
  //

  /**
   * Base class for actions that write the selected changes into one side.
   * myModifiedSide is the side that receives the modification.
   */
  private abstract class ApplySelectedChangesActionBase extends AnAction implements DumbAware {
    @NotNull protected final Side myModifiedSide;
    private final boolean myShortcut;

    public ApplySelectedChangesActionBase(@NotNull Side modifiedSide, boolean shortcut) {
      myModifiedSide = modifiedSide;
      myShortcut = shortcut;
    }

    @Override
    public void update(@NotNull AnActionEvent e) {
      if (myShortcut) {
        // consume shortcut even if there are nothing to do - avoid calling some other action
        e.getPresentation().setEnabledAndVisible(true);
        return;
      }

      Editor editor = e.getData(CommonDataKeys.EDITOR);
      if (editor != getEditor()) {
        e.getPresentation().setEnabledAndVisible(false);
        return;
      }

      if (!isEditable(myModifiedSide, true) || isStateIsOutOfDate()) {
        e.getPresentation().setEnabledAndVisible(false);
        return;
      }

      e.getPresentation().setVisible(true);
      e.getPresentation().setEnabled(isSomeChangeSelected());
    }

    @Override
    public void actionPerformed(@NotNull final AnActionEvent e) {
      final List<UnifiedDiffChange> selectedChanges = getSelectedChanges();
      if (selectedChanges.isEmpty()) return;

      // Re-check preconditions: shortcut variants skip them in update().
      if (!isEditable(myModifiedSide, true)) return;
      if (isStateIsOutOfDate()) return;

      String title = e.getPresentation().getText() + " selected changes";
      DiffUtil.executeWriteCommand(getDocument(myModifiedSide), e.getProject(), title, new Runnable() {
        @Override
        public void run() {
          // state is invalidated during apply(), but changes are in reverse order, so they should not conflict with each other
          apply(selectedChanges);
          scheduleRediff();
        }
      });
    }

    // True when the caret(s)/selection intersects at least one change block.
    protected boolean isSomeChangeSelected() {
      if (myChangedBlockData == null) return false;
      List<UnifiedDiffChange> changes = myChangedBlockData.getDiffChanges();
      if (changes.isEmpty()) return false;

      List<Caret> carets = getEditor().getCaretModel().getAllCarets();
      if (carets.size() != 1) return true;
      Caret caret = carets.get(0);
      if (caret.hasSelection()) return true;
      int line = getEditor().getDocument().getLineNumber(getEditor().getExpectedCaretOffset());

      for (UnifiedDiffChange change : changes) {
        if (DiffUtil.isSelectedByLine(line, change.getLine1(), change.getLine2())) return true;
      }
      return false;
    }

    @CalledWithWriteLock
    protected abstract void apply(@NotNull List<UnifiedDiffChange> changes);
  }

  // Copies the focused side's content over the other side for each selected change.
  private class ReplaceSelectedChangesAction extends ApplySelectedChangesActionBase {
    public ReplaceSelectedChangesAction(@NotNull Side focusedSide, boolean shortcut) {
      super(focusedSide.other(), shortcut);
      setShortcutSet(ActionManager.getInstance().getAction(focusedSide.select("Diff.ApplyLeftSide", "Diff.ApplyRightSide")).getShortcutSet());
      getTemplatePresentation().setText("Replace");
      getTemplatePresentation().setIcon(focusedSide.select(AllIcons.Diff.ArrowRight, AllIcons.Diff.Arrow));
    }

    @Override
    protected void apply(@NotNull List<UnifiedDiffChange> changes) {
      for (UnifiedDiffChange change : changes) {
        replaceChange(change, myModifiedSide.other());
      }
    }
  }

  // Appends (inserts) the focused side's content into the other side without removing anything.
  private class AppendSelectedChangesAction extends ApplySelectedChangesActionBase {
    public AppendSelectedChangesAction(@NotNull Side focusedSide, boolean shortcut) {
      super(focusedSide.other(), shortcut);
      setShortcutSet(ActionManager.getInstance().getAction(focusedSide.select("Diff.AppendLeftSide", "Diff.AppendRightSide")).getShortcutSet());
      getTemplatePresentation().setText("Insert");
      getTemplatePresentation().setIcon(focusedSide.select(AllIcons.Diff.ArrowRightDown, AllIcons.Diff.ArrowLeftDown));
    }

    @Override
    protected void apply(@NotNull List<UnifiedDiffChange> changes) {
      for (UnifiedDiffChange change : changes) {
        appendChange(change, myModifiedSide.other());
      }
    }
  }

  // Overwrites the focused side's change with the other side's content ("revert").
  private class RevertSelectedChangesAction extends ApplySelectedChangesActionBase {
// Continuation of RevertSelectedChangesAction: note super(focusedSide, false) — here
    // myModifiedSide IS the focused side, unlike Replace/Append which pass focusedSide.other().
    public RevertSelectedChangesAction(@NotNull Side focusedSide) {
      super(focusedSide, false);
      getTemplatePresentation().setText("Revert");
      getTemplatePresentation().setIcon(AllIcons.Diff.Remove);
    }

    @Override
    protected void apply(@NotNull List<UnifiedDiffChange> changes) {
      for (UnifiedDiffChange change : changes) {
        replaceChange(change, myModifiedSide.other());
      }
    }
  }

  /**
   * Replaces one change block on sourceSide.other() with the corresponding
   * lines from sourceSide.
   */
  @CalledWithWriteLock
  public void replaceChange(@NotNull UnifiedDiffChange change, @NotNull Side sourceSide) {
    Side outputSide = sourceSide.other();

    Document document1 = getDocument(Side.LEFT);
    Document document2 = getDocument(Side.RIGHT);

    LineFragment lineFragment = change.getLineFragment();

    DiffUtil.applyModification(outputSide.select(document1, document2), outputSide.getStartLine(lineFragment), outputSide.getEndLine(lineFragment),
                               sourceSide.select(document1, document2), sourceSide.getStartLine(lineFragment), sourceSide.getEndLine(lineFragment));

    // no need to mark myStateIsOutOfDate - it will be made by DocumentListener
    // TODO: we can apply change manually, without marking state out-of-date. But we'll have to schedule rediff anyway.
  }

  /**
   * Inserts sourceSide's lines for the change after the output side's block,
   * without deleting anything. No-op when the source range is empty.
   */
  @CalledWithWriteLock
  public void appendChange(@NotNull UnifiedDiffChange change, @NotNull final Side sourceSide) {
    Side outputSide = sourceSide.other();

    Document document1 = getDocument(Side.LEFT);
    Document document2 = getDocument(Side.RIGHT);

    LineFragment lineFragment = change.getLineFragment();
    if (sourceSide.getStartLine(lineFragment) == sourceSide.getEndLine(lineFragment)) return;

    // Output range is [endLine, endLine): pure insertion at the end of the block.
    DiffUtil.applyModification(outputSide.select(document1, document2), outputSide.getEndLine(lineFragment), outputSide.getEndLine(lineFragment),
                               sourceSide.select(document1, document2), sourceSide.getStartLine(lineFragment), sourceSide.getEndLine(lineFragment));
  }

  //
  // Impl
  //

  @NotNull
  public TextDiffSettingsHolder.TextDiffSettings getTextSettings() {
    return TextDiffViewerUtil.getTextSettings(myContext);
  }

  @NotNull
  public FoldingModelSupport.Settings getFoldingModelSettings() {
    return TextDiffViewerUtil.getFoldingModelSettings(myContext);
  }

  @NotNull
  private DiffUtil.DiffConfig getDiffConfig() {
    return new DiffUtil.DiffConfig(getIgnorePolicy(), getHighlightPolicy());
  }

  // DO_NOT_HIGHLIGHT is not supported by this viewer; degrade to BY_LINE.
  @NotNull
  private HighlightPolicy getHighlightPolicy() {
    HighlightPolicy policy = getTextSettings().getHighlightPolicy();
    if (policy == HighlightPolicy.DO_NOT_HIGHLIGHT) return HighlightPolicy.BY_LINE;
    return policy;
  }

  // IGNORE_WHITESPACES_CHUNKS is not supported by this viewer; degrade to IGNORE_WHITESPACES.
  @NotNull
  private IgnorePolicy getIgnorePolicy() {
    IgnorePolicy policy = getTextSettings().getIgnorePolicy();
    if (policy == IgnorePolicy.IGNORE_WHITESPACES_CHUNKS) return IgnorePolicy.IGNORE_WHITESPACES;
    return policy;
  }

  //
  // Getters
  //

  @NotNull
  public Side getMasterSide() {
    return myMasterSide;
  }

  @NotNull
  public EditorEx getEditor() {
    return myEditor;
  }

  @NotNull
  protected List<? extends EditorEx> getEditors() {
    return Collections.singletonList(myEditor);
  }

  @NotNull
  protected List<? extends DocumentContent> getContents() {
    //noinspection unchecked
    return (List<? extends DocumentContent>)(List)myRequest.getContents();
  }

  @NotNull
  protected DocumentContent getContent(@NotNull Side side) {
    return side.select(getContents());
  }

  @NotNull
  protected DocumentContent getContent1() {
    return getContent(Side.LEFT);
  }

  @NotNull
  protected DocumentContent getContent2() {
    return getContent(Side.RIGHT);
  }

  @CalledInAwt
  @Nullable
  protected List<UnifiedDiffChange> getDiffChanges() {
    return myChangedBlockData == null ? null : myChangedBlockData.getDiffChanges();
  }

  @NotNull
  @Override
  public JComponent getComponent() {
    return myPanel;
  }

  @Nullable
  @Override
  public JComponent getPreferredFocusedComponent() {
    if (!myPanel.isGoodContent()) return null;
    return myEditor.getContentComponent();
  }

  @NotNull
  @Override
  protected JComponent getStatusPanel() {
    return myStatusPanel;
  }

  /**
   * A side is editable when not blocked by the read-only lock (if respected),
   * not forced read-only by the request, and its document can be made writable.
   */
  @CalledInAwt
  public boolean isEditable(@NotNull Side side, boolean respectReadOnlyLock) {
    if (myReadOnlyLockSet && respectReadOnlyLock) return false;
    if (side.select(myForceReadOnlyFlags)) return false;
    return DiffUtil.canMakeWritable(getDocument(side));
  }

  @NotNull
  public Document getDocument(@NotNull Side side) {
    return getContent(side).getDocument();
  }

  protected boolean isStateIsOutOfDate() {
    return myStateIsOutOfDate;
  }

  //
  // Misc
  //

  @Nullable
  @Override
  protected OpenFileDescriptor getOpenFileDescriptor() {
    return getOpenFileDescriptor(myEditor.getCaretModel().getOffset());
  }

  // Change under the caret, or null when none / no rediff data yet.
  @CalledInAwt
  @Nullable
  protected UnifiedDiffChange getCurrentChange() {
    if (myChangedBlockData == null) return null;
    int caretLine = myEditor.getCaretModel().getLogicalPosition().line;

    for (UnifiedDiffChange change : myChangedBlockData.getDiffChanges()) {
      if (DiffUtil.isSelectedByLine(caretLine, change.getLine1(), change.getLine2())) return change;
    }
    return null;
  }

  // Changes intersecting the current selection/carets, collected in REVERSE order
  // (see the comment in ApplySelectedChangesActionBase.actionPerformed).
  @NotNull
  @CalledInAwt
  private List<UnifiedDiffChange> getSelectedChanges() {
    if (myChangedBlockData == null) return Collections.emptyList();
    final BitSet lines = DiffUtil.getSelectedLines(myEditor);
    List<UnifiedDiffChange> changes =
myChangedBlockData.getDiffChanges();

    List<UnifiedDiffChange> affectedChanges = new ArrayList<UnifiedDiffChange>();
    // Reverse iteration: callers apply these changes back-to-front so earlier
    // offsets are not invalidated by later modifications.
    for (int i = changes.size() - 1; i >= 0; i--) {
      UnifiedDiffChange change = changes.get(i);
      int line1 = change.getLine1();
      int line2 = change.getLine2();

      if (DiffUtil.isSelectedByLine(lines, line1, line2)) {
        affectedChanges.add(change);
      }
    }
    return affectedChanges;
  }

  // Maps a unified-editor offset back to a navigable position in one of the contents.
  @CalledInAwt
  @Nullable
  protected OpenFileDescriptor getOpenFileDescriptor(int offset) {
    LogicalPosition position = myEditor.offsetToLogicalPosition(offset);
    Pair<int[], Side> pair = transferLineFromOneside(position.line);
    int offset1 = DiffUtil.getOffset(getContent1().getDocument(), pair.first[0], position.column);
    int offset2 = DiffUtil.getOffset(getContent2().getDocument(), pair.first[1], position.column);

    // TODO: issue: non-optimal GoToSource position with caret on deleted block for "Compare with local"
    // we should transfer using calculated diff, not jump to "somehow related" position from old content's descriptor

    OpenFileDescriptor descriptor1 = getContent1().getOpenFileDescriptor(offset1);
    OpenFileDescriptor descriptor2 = getContent2().getOpenFileDescriptor(offset2);
    if (descriptor1 == null) return descriptor2;
    if (descriptor2 == null) return descriptor1;

    return pair.second.select(descriptor1, descriptor2);
  }

  // Same applicability rule as the two-sided text viewer.
  public static boolean canShowRequest(@NotNull DiffContext context, @NotNull DiffRequest request) {
    return TwosideTextDiffViewer.canShowRequest(context, request);
  }

  //
  // Actions
  //

  // Prev/next-difference navigation over the current change list.
  private class MyPrevNextDifferenceIterable extends PrevNextDifferenceIterableBase<UnifiedDiffChange> {
    @NotNull
    @Override
    protected List<UnifiedDiffChange> getChanges() {
      return ContainerUtil.notNullize(getDiffChanges());
    }

    @NotNull
    @Override
    protected EditorEx getEditor() {
      return myEditor;
    }

    @Override
    protected int getStartLine(@NotNull UnifiedDiffChange change) {
      return change.getLine1();
    }

    @Override
    protected int getEndLine(@NotNull UnifiedDiffChange change) {
      return change.getLine2();
    }

    @Override
    protected void scrollToChange(@NotNull UnifiedDiffChange change) {
      DiffUtil.scrollEditor(myEditor, change.getLine1(), true);
    }
  }

  // Ctrl+click navigation from the unified editor to the source file.
  private class MyOpenInEditorWithMouseAction extends OpenInEditorWithMouseAction {
    @Override
    protected OpenFileDescriptor getDescriptor(@NotNull Editor editor, int line) {
      if (editor != myEditor) return null;

      return getOpenFileDescriptor(myEditor.logicalPositionToOffset(new LogicalPosition(line, 0)));
    }
  }

  private class MyToggleExpandByDefaultAction extends TextDiffViewerUtil.ToggleExpandByDefaultAction {
    public MyToggleExpandByDefaultAction() {
      super(getTextSettings());
    }

    @Override
    protected void expandAll(boolean expand) {
      myFoldingModel.expandAll(expand);
    }
  }

  // Highlight-policy chooser; DO_NOT_HIGHLIGHT is removed (unsupported by this viewer).
  private class MyHighlightPolicySettingAction extends TextDiffViewerUtil.HighlightPolicySettingAction {
    public MyHighlightPolicySettingAction() {
      super(getTextSettings());
    }

    @NotNull
    @Override
    protected HighlightPolicy getCurrentSetting() {
      return getHighlightPolicy();
    }

    @NotNull
    @Override
    protected List<HighlightPolicy> getAvailableSettings() {
      ArrayList<HighlightPolicy> settings = ContainerUtil.newArrayList(HighlightPolicy.values());
      settings.remove(HighlightPolicy.DO_NOT_HIGHLIGHT);
      return settings;
    }

    @Override
    protected void onSettingsChanged() {
      rediff();
    }
  }

  // Ignore-policy chooser; IGNORE_WHITESPACES_CHUNKS is removed (unsupported by this viewer).
  private class MyIgnorePolicySettingAction extends TextDiffViewerUtil.IgnorePolicySettingAction {
    public MyIgnorePolicySettingAction() {
      super(getTextSettings());
    }

    @NotNull
    @Override
    protected IgnorePolicy getCurrentSetting() {
      return getIgnorePolicy();
    }

    @NotNull
    @Override
    protected List<IgnorePolicy> getAvailableSettings() {
      ArrayList<IgnorePolicy> settings = ContainerUtil.newArrayList(IgnorePolicy.values());
      settings.remove(IgnorePolicy.IGNORE_WHITESPACES_CHUNKS);
      return settings;
    }

    @Override
    protected void onSettingsChanged() {
      rediff();
    }
  }

  // Toggles the viewer-wide read-only lock and refreshes dependent UI state.
  private class MyReadOnlyLockAction extends TextDiffViewerUtil.ReadOnlyLockAction {
    public MyReadOnlyLockAction() {
      super(getContext());
      init();
    }

    @Override
    protected void doApply(boolean readOnly) {
      myReadOnlyLockSet = readOnly;
      if (myChangedBlockData != null) {
        for (UnifiedDiffChange unifiedDiffChange : myChangedBlockData.getDiffChanges()) {
          unifiedDiffChange.updateGutterActions();
        }
      }
      updateEditorCanBeTyped();
    }

    // The lock makes sense only if at least one side could be edited at all.
    @Override
    protected boolean canEdit() {
      return !myForceReadOnlyFlags[0] && DiffUtil.canMakeWritable(getContent1().getDocument()) ||
             !myForceReadOnlyFlags[1] && DiffUtil.canMakeWritable(getContent2().getDocument());
    }
  }

  //
  // Scroll from annotate
  //

  // Iterates every line of one side's document as (lineNumber, lineText) pairs.
  private class AllLinesIterator implements Iterator<Pair<Integer, CharSequence>> {
    @NotNull private final Side mySide;
    @NotNull private final Document myDocument;
    private int myLine = 0;

    private AllLinesIterator(@NotNull Side side) {
      mySide = side;
      myDocument = getContent(mySide).getDocument();
    }

    @Override
    public boolean hasNext() {
      return myLine < getLineCount(myDocument);
    }

    @Override
    public Pair<Integer, CharSequence> next() {
      int offset1 = myDocument.getLineStartOffset(myLine);
      int offset2 = myDocument.getLineEndOffset(myLine);

      CharSequence text = myDocument.getImmutableCharSequence().subSequence(offset1, offset2);

      Pair<Integer, CharSequence> pair = new Pair<Integer, CharSequence>(myLine, text);
      myLine++;

      return pair;
    }

    @Override
    public void remove() {
      throw new UnsupportedOperationException();
    }
  }

  // Iterates only the changed lines, block by block; requires up-to-date state
  // (asserted in loadNextBlock).
  private class ChangedLinesIterator extends BufferedLineIterator {
    @NotNull private final Side mySide;
    @NotNull private final List<UnifiedDiffChange> myChanges;

    private int myIndex = 0;

    private ChangedLinesIterator(@NotNull Side side, @NotNull List<UnifiedDiffChange> changes) {
      mySide = side;
      myChanges = changes;
      init();
    }

    @Override
    public boolean hasNextBlock() {
      return myIndex < myChanges.size();
    }

    @Override
    public void loadNextBlock() {
      LOG.assertTrue(!myStateIsOutOfDate);

      UnifiedDiffChange change = myChanges.get(myIndex);
      myIndex++;

      LineFragment lineFragment = change.getLineFragment();

      int insertedStart = lineFragment.getStartOffset2();
      int insertedEnd =
lineFragment.getEndOffset2();
      // Text of the inserted (right-side) part of the fragment, addressed by document offsets.
      CharSequence insertedText = getContent(mySide).getDocument().getCharsSequence().subSequence(insertedStart, insertedEnd);

      int lineNumber = lineFragment.getStartLine2();

      // Split the inserted text into individual lines and buffer each one with its line number.
      LineTokenizer tokenizer = new LineTokenizer(insertedText.toString());
      for (String line : tokenizer.execute()) {
        addLine(lineNumber, line);
        lineNumber++;
      }
    }
  }

  //
  // Helpers
  //

  // DataProvider hook: exposes viewer internals (prev/next iterable, backing file,
  // current editor, current change range) to actions via the DataContext mechanism.
  @Nullable
  @Override
  public Object getData(@NonNls String dataId) {
    if (DiffDataKeys.PREV_NEXT_DIFFERENCE_ITERABLE.is(dataId)) {
      return myPrevNextDifferenceIterable;
    }
    else if (CommonDataKeys.VIRTUAL_FILE.is(dataId)) {
      return DiffUtil.getVirtualFile(myRequest, myMasterSide);
    }
    else if (DiffDataKeys.CURRENT_EDITOR.is(dataId)) {
      return myEditor;
    }
    else if (DiffDataKeys.CURRENT_CHANGE_RANGE.is(dataId)) {
      UnifiedDiffChange change = getCurrentChange();
      if (change != null) {
        return new LineRange(change.getLine1(), change.getLine2());
      }
    }
    // Unhandled keys fall through to the base viewer.
    return super.getData(dataId);
  }

  // Status panel showing the number of diff changes in the current rediff result.
  private class MyStatusPanel extends StatusPanel {
    @Override
    protected int getChangesCount() {
      // myChangedBlockData is null until the first rediff completes.
      return myChangedBlockData == null ? 0 : myChangedBlockData.getDiffChanges().size();
    }
  }

  // Immutable holder for the artifacts produced while building the unified document
  // from the two sides: the fragment builder plus optional highlighters.
  private static class TwosideDocumentData {
    @NotNull private final UnifiedFragmentBuilder myBuilder;
    @Nullable private final EditorHighlighter myHighlighter;
    @Nullable private final UnifiedEditorRangeHighlighter myRangeHighlighter;

    public TwosideDocumentData(@NotNull UnifiedFragmentBuilder builder,
                               @Nullable EditorHighlighter highlighter,
                               @Nullable UnifiedEditorRangeHighlighter rangeHighlighter) {
      myBuilder = builder;
      myHighlighter = highlighter;
      myRangeHighlighter = rangeHighlighter;
    }

    @NotNull
    public UnifiedFragmentBuilder getBuilder() {
      return myBuilder;
    }

    @Nullable
    public EditorHighlighter getHighlighter() {
      return myHighlighter;
    }

    @Nullable
    public UnifiedEditorRangeHighlighter getRangeHighlighter() {
      return myRangeHighlighter;
    }
  }

  // Immutable holder for the state derived from the last successful rediff:
  // the diff changes, the guarded (read-only) range markers, and the oneside<->twoside
  // line-number mapping.
  private static class ChangedBlockData {
    @NotNull private final List<UnifiedDiffChange> myDiffChanges;
    @NotNull private final List<RangeMarker> myGuardedRangeBlocks;
    @NotNull private final LineNumberConvertor myLineNumberConvertor;

    // NOTE(review): parameter name "guarderRangeBlocks" looks like a typo for
    // "guardedRangeBlocks"; kept unchanged in this documentation-only pass.
    public ChangedBlockData(@NotNull List<UnifiedDiffChange> diffChanges,
                            @NotNull List<RangeMarker> guarderRangeBlocks,
                            @NotNull LineNumberConvertor lineNumberConvertor) {
      myDiffChanges = diffChanges;
      myGuardedRangeBlocks = guarderRangeBlocks;
      myLineNumberConvertor = lineNumberConvertor;
    }

    @NotNull
    public List<UnifiedDiffChange> getDiffChanges() {
      return myDiffChanges;
    }

    @NotNull
    public List<RangeMarker> getGuardedRangeBlocks() {
      return myGuardedRangeBlocks;
    }

    @NotNull
    public LineNumberConvertor getLineNumberConvertor() {
      return myLineNumberConvertor;
    }
  }

  // Immutable holder for everything needed to (re)initialize the combined editor:
  // the merged text, optional highlighters, file type, and per-side line convertors.
  private static class CombinedEditorData {
    @NotNull private final CharSequence myText;
    @Nullable private final EditorHighlighter myHighlighter;
    @Nullable private final UnifiedEditorRangeHighlighter myRangeHighlighter;
    @Nullable private final FileType myFileType;
    @NotNull private final TIntFunction myLineConvertor1;
    @NotNull private final TIntFunction myLineConvertor2;

    public CombinedEditorData(@NotNull
CharSequence text, @Nullable EditorHighlighter highlighter, @Nullable UnifiedEditorRangeHighlighter rangeHighlighter, @Nullable FileType fileType, @NotNull TIntFunction convertor1, @NotNull TIntFunction convertor2) { myText = text; myHighlighter = highlighter; myRangeHighlighter = rangeHighlighter; myFileType = fileType; myLineConvertor1 = convertor1; myLineConvertor2 = convertor2; } @NotNull public CharSequence getText() { return myText; } @Nullable public EditorHighlighter getHighlighter() { return myHighlighter; } @Nullable public UnifiedEditorRangeHighlighter getRangeHighlighter() { return myRangeHighlighter; } @Nullable public FileType getFileType() { return myFileType; } @NotNull public TIntFunction getLineConvertor1() { return myLineConvertor1; } @NotNull public TIntFunction getLineConvertor2() { return myLineConvertor2; } } private class MyInitialScrollHelper extends InitialScrollPositionSupport.TwosideInitialScrollHelper { @NotNull @Override protected List<? extends Editor> getEditors() { return UnifiedDiffViewer.this.getEditors(); } @Override protected void disableSyncScroll(boolean value) { } @Override public void onSlowRediff() { // Will not happen for initial rediff } @Nullable @Override protected LogicalPosition[] getCaretPositions() { LogicalPosition position = myEditor.getCaretModel().getLogicalPosition(); Pair<int[], Side> pair = transferLineFromOneside(position.line); LogicalPosition[] carets = new LogicalPosition[2]; carets[0] = getPosition(pair.first[0], position.column); carets[1] = getPosition(pair.first[1], position.column); return carets; } @Override protected boolean doScrollToPosition() { if (myCaretPosition == null) return false; LogicalPosition twosidePosition = myMasterSide.selectNotNull(myCaretPosition); int onesideLine = transferLineToOneside(myMasterSide, twosidePosition.line); LogicalPosition position = new LogicalPosition(onesideLine, twosidePosition.column); myEditor.getCaretModel().moveToLogicalPosition(position); if 
(myEditorsPosition != null && myEditorsPosition.isSame(position)) { DiffUtil.scrollToPoint(myEditor, myEditorsPosition.myPoints[0], false); } else { DiffUtil.scrollToCaret(myEditor, false); } return true; } @NotNull private LogicalPosition getPosition(int line, int column) { if (line == -1) return new LogicalPosition(0, 0); return new LogicalPosition(line, column); } private void doScrollToLine(@NotNull Side side, @NotNull LogicalPosition position) { int onesideLine = transferLineToOneside(side, position.line); DiffUtil.scrollEditor(myEditor, onesideLine, position.column, false); } @Override protected boolean doScrollToLine() { if (myScrollToLine == null) return false; doScrollToLine(myScrollToLine.first, new LogicalPosition(myScrollToLine.second, 0)); return true; } private boolean doScrollToChange(@NotNull ScrollToPolicy scrollToChangePolicy) { if (myChangedBlockData == null) return false; List<UnifiedDiffChange> changes = myChangedBlockData.getDiffChanges(); UnifiedDiffChange targetChange = scrollToChangePolicy.select(changes); if (targetChange == null) return false; DiffUtil.scrollEditor(myEditor, targetChange.getLine1(), false); return true; } @Override protected boolean doScrollToChange() { if (myScrollToChange == null) return false; return doScrollToChange(myScrollToChange); } @Override protected boolean doScrollToFirstChange() { return doScrollToChange(ScrollToPolicy.FIRST_CHANGE); } @Override protected boolean doScrollToContext() { if (myNavigationContext == null) return false; if (myChangedBlockData == null) return false; ChangedLinesIterator changedLinesIterator = new ChangedLinesIterator(Side.RIGHT, myChangedBlockData.getDiffChanges()); NavigationContextChecker checker = new NavigationContextChecker(changedLinesIterator, myNavigationContext); int line = checker.contextMatchCheck(); if (line == -1) { // this will work for the case, when spaces changes are ignored, and corresponding fragments are not reported as changed // just try to find target line -> 
+- AllLinesIterator allLinesIterator = new AllLinesIterator(Side.RIGHT); NavigationContextChecker checker2 = new NavigationContextChecker(allLinesIterator, myNavigationContext); line = checker2.contextMatchCheck(); } if (line == -1) return false; doScrollToLine(Side.RIGHT, new LogicalPosition(line, 0)); return true; } } private static class MyFoldingModel extends FoldingModelSupport { public MyFoldingModel(@NotNull EditorEx editor, @NotNull Disposable disposable) { super(new EditorEx[]{editor}, disposable); } public void install(@Nullable List<LineRange> changedLines, @NotNull UserDataHolder context, @NotNull FoldingModelSupport.Settings settings) { Iterator<int[]> it = map(changedLines, new Function<LineRange, int[]>() { @Override public int[] fun(LineRange line) { return new int[]{ line.start, line.end}; } }); install(it, context, settings); } @NotNull public TIntFunction getLineNumberConvertor() { return getLineConvertor(0); } } private static class MyReadonlyFragmentModificationHandler implements ReadonlyFragmentModificationHandler { @Override public void handle(ReadOnlyFragmentModificationException e) { // do nothing } } }
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.ec2.model;

import java.io.Serializable;

/**
 * <p>
 * Describes the limit price of a Reserved Instance offering.
 * </p>
 */
public class ReservedInstanceLimitPrice implements Serializable, Cloneable {

    /**
     * Limit price on the total order (instanceCount * price); used for
     * Reserved Instance Marketplace offerings.
     */
    private Double amount;

    /**
     * Currency in which the <code>limitPrice</code> amount is specified.
     * At this time, the only supported currency is <code>USD</code>.
     * <p>
     * <b>Allowed Values: </b>USD
     */
    private String currencyCode;

    /**
     * Returns the limit price on the total order (instanceCount * price) for
     * Reserved Instance Marketplace offerings.
     *
     * @return the limit price, or <code>null</code> if not set.
     */
    public Double getAmount() {
        return amount;
    }

    /**
     * Sets the limit price on the total order (instanceCount * price) for
     * Reserved Instance Marketplace offerings.
     *
     * @param amount the limit price.
     */
    public void setAmount(Double amount) {
        this.amount = amount;
    }

    /**
     * Fluent variant of {@link #setAmount(Double)}.
     *
     * @param amount the limit price.
     * @return this object, so that method calls can be chained together.
     */
    public ReservedInstanceLimitPrice withAmount(Double amount) {
        this.amount = amount;
        return this;
    }

    /**
     * Returns the currency in which the <code>limitPrice</code> amount is
     * specified. At this time, the only supported currency is <code>USD</code>.
     *
     * @return the currency code.
     * @see CurrencyCodeValues
     */
    public String getCurrencyCode() {
        return currencyCode;
    }

    /**
     * Sets the currency in which the <code>limitPrice</code> amount is
     * specified. At this time, the only supported currency is <code>USD</code>.
     *
     * @param currencyCode the currency code (allowed values: USD).
     * @see CurrencyCodeValues
     */
    public void setCurrencyCode(String currencyCode) {
        this.currencyCode = currencyCode;
    }

    /**
     * Fluent variant of {@link #setCurrencyCode(String)}.
     *
     * @param currencyCode the currency code (allowed values: USD).
     * @return this object, so that method calls can be chained together.
     * @see CurrencyCodeValues
     */
    public ReservedInstanceLimitPrice withCurrencyCode(String currencyCode) {
        this.currencyCode = currencyCode;
        return this;
    }

    /**
     * Sets the currency from its enum form; stored as the enum's string value.
     *
     * @param currencyCode the currency code (allowed values: USD).
     * @see CurrencyCodeValues
     */
    public void setCurrencyCode(CurrencyCodeValues currencyCode) {
        this.currencyCode = currencyCode.toString();
    }

    /**
     * Fluent variant of {@link #setCurrencyCode(CurrencyCodeValues)}.
     *
     * @param currencyCode the currency code (allowed values: USD).
     * @return this object, so that method calls can be chained together.
     * @see CurrencyCodeValues
     */
    public ReservedInstanceLimitPrice withCurrencyCode(CurrencyCodeValues currencyCode) {
        this.currencyCode = currencyCode.toString();
        return this;
    }

    /**
     * Renders this object as <code>{Amount: a,CurrencyCode: c}</code> with
     * unset members omitted; useful for testing and debugging.
     *
     * @return a string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("{");
        if (getAmount() != null) {
            text.append("Amount: ").append(getAmount()).append(",");
        }
        if (getCurrencyCode() != null) {
            text.append("CurrencyCode: ").append(getCurrencyCode());
        }
        return text.append("}").toString();
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation as the generated SDK beans.
        int hash = 1;
        hash = 31 * hash + (getAmount() == null ? 0 : getAmount().hashCode());
        hash = 31 * hash + (getCurrencyCode() == null ? 0 : getCurrencyCode().hashCode());
        return hash;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        // instanceof is false for null, so this also rejects null arguments.
        if (!(obj instanceof ReservedInstanceLimitPrice)) return false;

        ReservedInstanceLimitPrice that = (ReservedInstanceLimitPrice) obj;
        if (getAmount() == null ? that.getAmount() != null
                                : !getAmount().equals(that.getAmount())) {
            return false;
        }
        return getCurrencyCode() == null
                ? that.getCurrencyCode() == null
                : getCurrencyCode().equals(that.getCurrencyCode());
    }

    @Override
    public ReservedInstanceLimitPrice clone() {
        try {
            return (ReservedInstanceLimitPrice) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                    + "even though we're Cloneable!", e);
        }
    }
}
package uk.ac.gcu.bluedroid; /* * Copyright (C) 2009 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.util.Set; import android.app.Activity; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothDevice; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.os.Bundle; import android.util.Log; import android.view.View; import android.view.Window; import android.view.View.OnClickListener; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.ListView; import android.widget.TextView; import android.widget.AdapterView.OnItemClickListener; /** * This Activity appears as a dialog. It lists any paired devices and * devices detected in the area after discovery. When a device is chosen * by the user, the MAC address of the device is sent back to the parent * Activity in the result Intent. 
*/ public class DeviceListActivity extends Activity { // Debugging private static final String TAG = "DeviceListActivity"; private static final boolean D = true; // Return Intent extra public static String EXTRA_DEVICE_ADDRESS = "device_address"; // Member fields private BluetoothAdapter mBtAdapter; private ArrayAdapter<String> mPairedDevicesArrayAdapter; private ArrayAdapter<String> mNewDevicesArrayAdapter; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // Setup the window requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS); setContentView(R.layout.device_list); // Set result CANCELED in case the user backs out setResult(Activity.RESULT_CANCELED); // Initialize the button to perform device discovery Button scanButton = (Button) findViewById(R.id.button_scan); scanButton.setOnClickListener(new OnClickListener() { public void onClick(View v) { doDiscovery(); v.setVisibility(View.GONE); } }); // Initialize array adapters. One for already paired devices and // one for newly discovered devices mPairedDevicesArrayAdapter = new ArrayAdapter<String>(this, R.layout.device_name); mNewDevicesArrayAdapter = new ArrayAdapter<String>(this, R.layout.device_name); // Find and set up the ListView for paired devices ListView pairedListView = (ListView) findViewById(R.id.paired_devices); pairedListView.setAdapter(mPairedDevicesArrayAdapter); pairedListView.setOnItemClickListener(mDeviceClickListener); // Find and set up the ListView for newly discovered devices ListView newDevicesListView = (ListView) findViewById(R.id.new_devices); newDevicesListView.setAdapter(mNewDevicesArrayAdapter); newDevicesListView.setOnItemClickListener(mDeviceClickListener); // Register for broadcasts when a device is discovered IntentFilter filter = new IntentFilter(BluetoothDevice.ACTION_FOUND); this.registerReceiver(mReceiver, filter); // Register for broadcasts when discovery has finished filter = new 
IntentFilter(BluetoothAdapter.ACTION_DISCOVERY_FINISHED); this.registerReceiver(mReceiver, filter); // Get the local Bluetooth adapter mBtAdapter = BluetoothAdapter.getDefaultAdapter(); // Get a set of currently paired devices Set<BluetoothDevice> pairedDevices = mBtAdapter.getBondedDevices(); // If there are paired devices, add each one to the ArrayAdapter if (pairedDevices.size() > 0) { findViewById(R.id.title_paired_devices).setVisibility(View.VISIBLE); for (BluetoothDevice device : pairedDevices) { mPairedDevicesArrayAdapter.add(device.getName() + "\n" + device.getAddress()); } } else { String noDevices = getResources().getText(R.string.none_paired).toString(); mPairedDevicesArrayAdapter.add(noDevices); } } @Override protected void onDestroy() { super.onDestroy(); // Make sure we're not doing discovery anymore if (mBtAdapter != null) { mBtAdapter.cancelDiscovery(); } // Unregister broadcast listeners this.unregisterReceiver(mReceiver); } /** * Start device discover with the BluetoothAdapter */ private void doDiscovery() { if (D) Log.d(TAG, "doDiscovery()"); // Indicate scanning in the title setProgressBarIndeterminateVisibility(true); setTitle(R.string.scanning); // Turn on sub-title for new devices findViewById(R.id.title_new_devices).setVisibility(View.VISIBLE); // If we're already discovering, stop it if (mBtAdapter.isDiscovering()) { mBtAdapter.cancelDiscovery(); } // Request discover from BluetoothAdapter mBtAdapter.startDiscovery(); } // The on-click listener for all devices in the ListViews private OnItemClickListener mDeviceClickListener = new OnItemClickListener() { public void onItemClick(AdapterView<?> av, View v, int arg2, long arg3) { // Cancel discovery because it's costly and we're about to connect mBtAdapter.cancelDiscovery(); // Get the device MAC address, which is the last 17 chars in the View String info = ((TextView) v).getText().toString(); String address = info.substring(info.length() - 17); // Create the result Intent and include the MAC 
address Intent intent = new Intent(); intent.putExtra(EXTRA_DEVICE_ADDRESS, address); // Set result and finish this Activity setResult(Activity.RESULT_OK, intent); finish(); } }; // The BroadcastReceiver that listens for discovered devices and // changes the title when discovery is finished private final BroadcastReceiver mReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { String action = intent.getAction(); // When discovery finds a device if (BluetoothDevice.ACTION_FOUND.equals(action)) { // Get the BluetoothDevice object from the Intent BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE); // If it's already paired, skip it, because it's been listed already if (device.getBondState() != BluetoothDevice.BOND_BONDED) { mNewDevicesArrayAdapter.add(device.getName() + "\n" + device.getAddress()); } // When discovery is finished, change the Activity title } else if (BluetoothAdapter.ACTION_DISCOVERY_FINISHED.equals(action)) { setProgressBarIndeterminateVisibility(false); setTitle(R.string.select_device); if (mNewDevicesArrayAdapter.getCount() == 0) { String noDevices = getResources().getText(R.string.none_found).toString(); mNewDevicesArrayAdapter.add(noDevices); } } } }; }
package com.magento.idea.magento2plugin.xml.di.index; import com.intellij.ide.highlighter.XmlFileType; import com.intellij.openapi.project.Project; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.psi.impl.source.xml.XmlDocumentImpl; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.xml.XmlAttribute; import com.intellij.psi.xml.XmlAttributeValue; import com.intellij.psi.xml.XmlFile; import com.intellij.psi.xml.XmlTag; import com.intellij.util.ArrayUtil; import com.intellij.util.indexing.*; import com.intellij.util.io.DataExternalizer; import com.intellij.util.io.EnumeratorStringDescriptor; import com.intellij.util.io.KeyDescriptor; import com.jetbrains.php.PhpIndex; import com.jetbrains.php.lang.PhpLangUtil; import com.jetbrains.php.lang.psi.elements.PhpClass; import com.magento.idea.magento2plugin.Settings; import org.jetbrains.annotations.NotNull; import java.util.*; /** * Created by dkvashnin on 10/13/15. 
*/ public class VirtualTypesNamesFileBasedIndex extends FileBasedIndexExtension<String,String> { public static final ID<String, String> NAME = ID.create("com.magento.idea.magento2plugin.xml.di.index.virtual_types_names"); private final EnumeratorStringDescriptor myKeyDescriptor = new EnumeratorStringDescriptor(); private final MyDataIndexer myDataIndexer = new MyDataIndexer(); private static final int SUPER_MAX_NESTING_LEVEL = 3; public static String[] getAllVirtualTypesNames(final Project project) { final Collection<String> allKeys = FileBasedIndex.getInstance().getAllKeys(NAME, project); return ArrayUtil.toStringArray(allKeys); } public static XmlAttributeValue[] getVirtualTypesByName(final Project project, final String virtualTypeName, final GlobalSearchScope scope) { List<XmlAttributeValue> xmlAttributeList = new ArrayList<XmlAttributeValue>(); Collection<VirtualFile> virtualFileCollection = FileBasedIndex.getInstance().getContainingFiles(NAME, virtualTypeName, scope); PsiManager psiManager = PsiManager.getInstance(project); for (VirtualFile virtualFile: virtualFileCollection) { XmlFile xmlFile = (XmlFile)psiManager.findFile(virtualFile); if (xmlFile == null) { continue; } XmlTag rootTag = xmlFile.getRootTag(); if (rootTag == null) { continue; } for (XmlTag typeTag: rootTag.getSubTags()) { if (typeTag.getName().equals("virtualType")) { XmlAttribute nameAttribute = typeTag.getAttribute("name"); if (nameAttribute != null) { if (nameAttribute.getValue() != null && nameAttribute.getValue().equals(virtualTypeName)) xmlAttributeList.add(nameAttribute.getValueElement()); } } } } return xmlAttributeList.toArray(new XmlAttributeValue[xmlAttributeList.size()]); } public static String getParentTypeName(final Project project, String virtualTypeName) { List<String> originNames = FileBasedIndex.getInstance().getValues(NAME, virtualTypeName, GlobalSearchScope.allScope(project)); if (originNames.size() > 0) { return originNames.get(0); } return null; } public static String 
getSuperParentTypeName(final Project project, String inputChildTypeName) { String superName = null; String childTypeName = inputChildTypeName; for (int index = 0; index < SUPER_MAX_NESTING_LEVEL; index++) { superName = getParentTypeName(project, childTypeName); if (superName == null) { superName = childTypeName; break; } childTypeName = superName; } return superName == null ? (!inputChildTypeName.equals(childTypeName) ? childTypeName : null) : superName; } public static List<PhpClass> getSuperParentTypes(final Project project, String inputChildTypeName) { List<PhpClass> result = new ArrayList<>(); String superName = getSuperParentTypeName(project, inputChildTypeName); if (superName == null) { return result; } PhpIndex phpIndex = PhpIndex.getInstance(project); result.addAll(phpIndex.getClassesByFQN(superName)); result.addAll(phpIndex.getInterfacesByFQN(superName)); return result; } @NotNull @Override public ID<String, String> getName() { return NAME; } @NotNull @Override public DataIndexer<String, String, FileContent> getIndexer() { return myDataIndexer; } @NotNull @Override public KeyDescriptor<String> getKeyDescriptor() { return myKeyDescriptor; } @NotNull @Override public DataExternalizer<String> getValueExternalizer() { return EnumeratorStringDescriptor.INSTANCE; } @NotNull @Override public FileBasedIndex.InputFilter getInputFilter() { return new FileBasedIndex.InputFilter() { @Override public boolean acceptInput(@NotNull VirtualFile file) { return file.getFileType() == XmlFileType.INSTANCE && file.getNameWithoutExtension().equals("di"); } }; } @Override public boolean dependsOnFileContent() { return true; } @Override public int getVersion() { return 2; } private class MyDataIndexer implements DataIndexer<String, String, FileContent> { @NotNull @Override public Map<String, String> map(@NotNull FileContent fileContent) { Map<String, String> map = new HashMap<>(); PsiFile psiFile = fileContent.getPsiFile(); if (!Settings.isEnabled(psiFile.getProject())) { return 
map; } XmlDocumentImpl document = PsiTreeUtil.getChildOfType(psiFile, XmlDocumentImpl.class); if(document == null) { return map; } XmlTag xmlTags[] = PsiTreeUtil.getChildrenOfType(psiFile.getFirstChild(), XmlTag.class); if(xmlTags == null) { return map; } for(XmlTag xmlTag: xmlTags) { if(xmlTag.getName().equals("config")) { for(XmlTag typeNode: xmlTag.findSubTags("virtualType")) { if (typeNode.getAttributeValue("name") != null && typeNode.getAttributeValue("type") != null) { map.put( typeNode.getAttributeValue("name"), PhpLangUtil.toPresentableFQN(typeNode.getAttributeValue("type")) ); } } } } return map; } } }
/*
 * Copyright (C) 2016 Peter Gregus for GravityBox Project (C3C076@xda)
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.ceco.marshmallow.gravitybox;

import java.io.File;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;

import com.ceco.marshmallow.gravitybox.ledcontrol.QuietHours;

import android.app.Activity;
import android.app.Fragment;
import android.content.Context;
import android.content.Intent;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.LayerDrawable;
import android.widget.ImageView;

import de.robv.android.xposed.XC_MethodHook;
import de.robv.android.xposed.XSharedPreferences;
import de.robv.android.xposed.XposedBridge;
import de.robv.android.xposed.XposedHelpers;

/**
 * Xposed module hooks for Dialer v25: replaces the "unknown caller" avatar with a
 * user-supplied photo and optionally shows the dialpad on launch / mutes dialpad tones
 * during quiet hours.
 *
 * Several hooked classes/members are referenced by their ProGuard-obfuscated names
 * ("bfz", "bae", method "a", fields "f"/"b"), so every hook is wrapped in its own
 * try/catch and simply logged when the target app version does not match.
 */
public class ModDialer25 {
    private static final String TAG = "GB:ModDialer25";
    // AOSP dialer activity and its Google-flavored subclass; the latter needs reflection
    // into its superclass to reach the (possibly obfuscated) showDialpadFragment method.
    private static final String CLASS_DIALTACTS_ACTIVITY = "com.android.dialer.app.DialtactsActivity";
    private static final String CLASS_DIALTACTS_ACTIVITY_GOOGLE = "com.google.android.apps.dialer.extensions.GoogleDialtactsActivity";
    private static final boolean DEBUG = false;

    // Quiet-hours config, refreshed from prefs each time the dialpad fragment resumes.
    private static QuietHours mQuietHours;
    // Timestamp of the last prefs reload; used to throttle XSharedPreferences.reload().
    private static long mPrefsReloadedTstamp;

    private static void log(String message) {
        XposedBridge.log(TAG + ": " + message);
    }

    /**
     * Resolution result for one hooked class: the class itself, a map from logical
     * method name to the real (possibly obfuscated) method name, and an optional
     * extra payload (here: the name of the show-dialpad method).
     */
    static class ClassInfo {
        Class<?> clazz;
        Map<String,String> methods;
        Object extra;

        ClassInfo(Class<?> cls) {
            clazz = cls;
            methods = new HashMap<>();
        }
    }

    /**
     * Locates the (obfuscated) CallCardFragment class and its avatar-setting method.
     * NOTE(review): unlike the other resolvers, this one does not skip a null clazz
     * returned by findClassIfExists; a mismatching app version is only caught by the
     * try/catch around the hook installation in init() — confirm this is intended.
     */
    private static ClassInfo resolveCallCardFragment(ClassLoader cl) {
        ClassInfo info = null;
        String[] CLASS_NAMES = new String[] { "bfz" };
        String[] METHOD_NAMES = new String[] { "a" };
        for (String className : CLASS_NAMES) {
            Class<?> clazz = XposedHelpers.findClassIfExists(className, cl);
            info = new ClassInfo(clazz);
            for (String methodName : METHOD_NAMES) {
                if (methodName.equals("a")) {
                    // Try each candidate real name until one exists on the class.
                    for (String realMethodName : new String[] { methodName }) {
                        Method m = XposedHelpers.findMethodExactIfExists(clazz, realMethodName);
                        if (m != null) {
                            info.methods.put(methodName, realMethodName);
                            break;
                        }
                    }
                }
            }
        }
        return info;
    }

    /**
     * Locates DialtactsActivity and its displayFragment(Intent) method, trying the
     * plain name first and falling back to the obfuscated name "c". The matching
     * show-dialpad method name is stashed in {@link ClassInfo#extra}.
     */
    private static ClassInfo resolveDialtactsActivity(ClassLoader cl) {
        ClassInfo info = null;
        String[] CLASS_NAMES = new String[] { CLASS_DIALTACTS_ACTIVITY };
        String[] METHOD_NAMES = new String[] { "displayFragment" };
        for (String className : CLASS_NAMES) {
            Class<?> clazz = XposedHelpers.findClassIfExists(className, cl);
            // Sanity check: the resolved class must actually be an Activity.
            if (clazz == null || !Activity.class.isAssignableFrom(clazz)) continue;
            info = new ClassInfo(clazz);
            for (String methodName : METHOD_NAMES) {
                if (methodName.equals("displayFragment")) {
                    for (String realMethodName : new String[] { methodName, "c" }) {
                        Method m = XposedHelpers.findMethodExactIfExists(clazz, realMethodName, Intent.class);
                        if (m != null) {
                            info.methods.put(methodName, realMethodName);
                            // Unobfuscated build -> plain method name; obfuscated -> "b".
                            if (realMethodName.equals(methodName)) {
                                info.extra = "showDialpadFragment";
                            } else {
                                info.extra = "b";
                            }
                            break;
                        }
                    }
                }
            }
        }
        return info;
    }

    /**
     * Locates DialpadFragment and its onResume/playTone(int,int) methods; playTone
     * may be obfuscated to "a".
     */
    private static ClassInfo resolveDialpadFragment(ClassLoader cl) {
        ClassInfo info = null;
        String[] CLASS_NAMES = new String[] { "com.android.dialer.app.dialpad.DialpadFragment" };
        String[] METHOD_NAMES = new String[] { "onResume", "playTone" };
        for (String className : CLASS_NAMES) {
            Class<?> clazz = XposedHelpers.findClassIfExists(className, cl);
            // Sanity check: must be a Fragment subclass.
            if (clazz == null || !Fragment.class.isAssignableFrom(clazz)) continue;
            info = new ClassInfo(clazz);
            for (String methodName : METHOD_NAMES) {
                Method m = null;
                if (methodName.equals("onResume")) {
                    m = XposedHelpers.findMethodExactIfExists(clazz, methodName);
                } else if (methodName.equals("playTone")) {
                    for (String realMethodName : new String[] { methodName, "a" }) {
                        m = XposedHelpers.findMethodExactIfExists(clazz, realMethodName, int.class, int.class);
                        if (m != null) break;
                    }
                }
                if (m != null) {
                    info.methods.put(methodName, m.getName());
                }
            }
        }
        return info;
    }

    /**
     * Loads the user-supplied caller photo (GravityBox files dir, "caller_photo"),
     * cropped to a circle, or null when no readable/decodable photo exists.
     */
    private static Drawable getUnknownCallerDrawable(Context ctx) throws Throwable {
        final String path = Utils.getGbContext(ctx).getFilesDir() + "/caller_photo";
        File f = new File(path);
        if (f.exists() && f.canRead()) {
            Bitmap b = BitmapFactory.decodeFile(path);
            if (b != null) {
                return new BitmapDrawable(ctx.getResources(), Utils.getCircleBitmap(b));
            }
        }
        return null;
    }

    // Reloads shared preferences at most once every 10 s (hooks can fire frequently).
    private static void reloadPrefs(XSharedPreferences prefs) {
        if ((System.currentTimeMillis() - mPrefsReloadedTstamp) > 10000) {
            if (DEBUG) log("Reloading preferences");
            prefs.reload();
            mPrefsReloadedTstamp = System.currentTimeMillis();
        }
    }

    /**
     * Installs all dialer hooks. Each hook group has its own try/catch so a single
     * resolution failure (e.g. unexpected app version) does not disable the others.
     *
     * @param prefs       GravityBox shared preferences (read via Xposed)
     * @param classLoader dialer app class loader
     * @param packageName dialer package name, used to resolve drawable resources
     */
    public static void init(final XSharedPreferences prefs, final ClassLoader classLoader,
            final String packageName) {
        // Hook 1: in-call card — swap the stock anonymous avatar for the user photo.
        try {
            final ClassInfo classInfoCallCardFragment = resolveCallCardFragment(classLoader);

            XC_MethodHook unknownCallerHook = new XC_MethodHook() {
                @Override
                protected void afterHookedMethod(MethodHookParam param) throws Throwable {
                    reloadPrefs(prefs);
                    if (!prefs.getBoolean(
                            GravityBoxSettings.PREF_KEY_CALLER_UNKNOWN_PHOTO_ENABLE, false)) return;

                    // Obfuscated field "f" holds the avatar ImageView (layered drawable).
                    ImageView iv = (ImageView) XposedHelpers.getObjectField(param.thisObject, "f");
                    if (iv == null || !(iv.getDrawable() instanceof LayerDrawable)) return;

                    // Only replace the image when layer 0 is the stock anonymous avatar.
                    final Resources res = iv.getResources();
                    String resName = "product_logo_avatar_anonymous_color_120";
                    Drawable picUnknown = res.getDrawable(res.getIdentifier(resName, "drawable",
                            packageName), null);
                    Drawable d = ((LayerDrawable) iv.getDrawable()).getDrawable(0);
                    if (d != null && picUnknown.getConstantState().equals(d.getConstantState())) {
                        Drawable newD = getUnknownCallerDrawable(iv.getContext());
                        if (newD != null) {
                            ((LayerDrawable) iv.getDrawable()).setDrawable(0, newD);
                            if (DEBUG) log("Unknow caller photo set");
                        }
                    }
                }
            };
            XposedHelpers.findAndHookMethod(classInfoCallCardFragment.clazz,
                    classInfoCallCardFragment.methods.get("a"), unknownCallerHook);
        } catch (Throwable t) {
            XposedBridge.log(t);
        }

        // Hook 2: incoming-call photo loader (obfuscated class "bae") — substitute the
        // user photo before it is handed to the UI.
        try {
            XposedHelpers.findAndHookMethod("bae", classLoader, "a",
                    Drawable.class, Bitmap.class, Object.class, new XC_MethodHook() {
                @Override
                protected void beforeHookedMethod(MethodHookParam param) throws Throwable {
                    reloadPrefs(prefs);
                    if (!prefs.getBoolean(
                            GravityBoxSettings.PREF_KEY_CALLER_UNKNOWN_PHOTO_ENABLE, false)) return;

                    // Obfuscated field "b" holds a Context.
                    final Context ctx = (Context) XposedHelpers.getObjectField(param.thisObject, "b");
                    final Resources res = ctx.getResources();
                    String resName = "img_no_image_automirrored";
                    Drawable picUnknown = res.getDrawable(res.getIdentifier(resName, "drawable",
                            packageName), null);
                    Drawable d = (Drawable) param.args[0];
                    // Replace when no photo was supplied or the stock placeholder is used.
                    if (d == null || picUnknown.getConstantState().equals(d.getConstantState())) {
                        Drawable newD = getUnknownCallerDrawable(ctx);
                        if (newD != null) {
                            param.args[0] = newD;
                            if (DEBUG) log("Unknow incoming caller photo set");
                        }
                    }
                }
            });
        } catch (Throwable t) {
            XposedBridge.log(t);
        }

        // Hook 3: show the dialpad automatically after DialtactsActivity lays out its
        // fragments, if the corresponding preference is enabled.
        try {
            final ClassInfo classInfoDialtactsActivity = resolveDialtactsActivity(classLoader);
            XposedHelpers.findAndHookMethod(classInfoDialtactsActivity.clazz,
                    classInfoDialtactsActivity.methods.get("displayFragment"), Intent.class, new XC_MethodHook() {
                @Override
                protected void afterHookedMethod(MethodHookParam param) throws Throwable {
                    reloadPrefs(prefs);
                    if (!prefs.getBoolean(GravityBoxSettings.PREF_KEY_DIALER_SHOW_DIALPAD, false)) return;

                    final String realClassName = param.thisObject.getClass().getName();
                    if (realClassName.equals(CLASS_DIALTACTS_ACTIVITY)) {
                        // Plain AOSP activity: call the resolved method directly.
                        XposedHelpers.callMethod(param.thisObject,
                                classInfoDialtactsActivity.extra.toString(), false);
                        if (DEBUG) log("showDialpadFragment() called within " + realClassName);
                    } else if (realClassName.equals(CLASS_DIALTACTS_ACTIVITY_GOOGLE)) {
                        // Google subclass: the method lives on the superclass and must be
                        // invoked reflectively.
                        final Class<?> superc = param.thisObject.getClass().getSuperclass();
                        Method m = XposedHelpers.findMethodExact(superc,
                                classInfoDialtactsActivity.extra.toString(), boolean.class);
                        m.invoke(param.thisObject, false);
                        if (DEBUG) log("showDialpadFragment() called within " + realClassName);
                    }
                }
            });
        } catch (Throwable t) {
            XposedBridge.log(t);
        }

        // Hook 4: mute dialpad touch tones during quiet hours. Quiet-hours prefs are
        // (re)loaded every time the dialpad fragment resumes.
        try {
            final ClassInfo classInfoDialpadFragment = resolveDialpadFragment(classLoader);

            XposedHelpers.findAndHookMethod(classInfoDialpadFragment.clazz,
                    classInfoDialpadFragment.methods.get("onResume"), new XC_MethodHook() {
                @Override
                protected void afterHookedMethod(MethodHookParam param2) throws Throwable {
                    XSharedPreferences qhPrefs = new XSharedPreferences(GravityBox.PACKAGE_NAME, "quiet_hours");
                    mQuietHours = new QuietHours(qhPrefs);
                }
            });

            // NOTE(review): if playTone fires before onResume ever ran, mQuietHours is
            // null and the NPE is swallowed by Xposed's hook error handling — confirm.
            XposedHelpers.findAndHookMethod(classInfoDialpadFragment.clazz,
                    classInfoDialpadFragment.methods.get("playTone"), int.class, int.class, new XC_MethodHook() {
                @Override
                protected void beforeHookedMethod(MethodHookParam param) throws Throwable {
                    if (mQuietHours.isSystemSoundMuted(QuietHours.SystemSound.DIALPAD)) {
                        // Suppress the original playTone call entirely.
                        param.setResult(null);
                    }
                }
            });
        } catch (Throwable t) {
            XposedBridge.log(t);
        }
    }
}
/*
 * Copyright 2005-2010 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.ldap;

import javax.naming.NamingException;
import javax.naming.directory.Attributes;

import org.springframework.ldap.core.AttributesMapper;
import org.springframework.ldap.core.ContextMapper;
import org.springframework.ldap.core.DirContextAdapter;
import org.springframework.ldap.core.DistinguishedName;
import org.springframework.ldap.core.LdapTemplate;
import org.springframework.test.AbstractDependencyInjectionSpringContextTests;

/**
 * Integration tests exercising the lookup methods of {@link LdapTemplate}
 * against an OpenLdap server populated with the standard test fixture.
 *
 * @author Mattias Hellborg Arthursson
 * @author Ulrik Sandberg
 */
public class LdapTemplateLookupOpenLdapITest extends AbstractDependencyInjectionSpringContextTests {

    /** DN of the single-valued-RDN fixture entry. */
    private static final String PERSON2_DN = "cn=Some Person2, ou=company1,c=Sweden";

    /** DN of the fixture entry whose RDN is built from two attributes (cn+sn). */
    private static final String MULTI_RDN_DN = "cn=Some Person+sn=Person, ou=company1,c=Norway";

    private LdapTemplate tested;

    protected String[] getConfigLocations() {
        return new String[] { "/conf/ldapTemplateTestContext-openldap.xml" };
    }

    /**
     * This method depends on a DirObjectFactory ({@link org.springframework.ldap.core.support.DefaultDirObjectFactory})
     * being set in the ContextSource.
     */
    public void testLookup_Plain() {
        DirContextAdapter adapter = (DirContextAdapter) tested.lookup(PERSON2_DN);

        assertEquals("Some Person2", adapter.getStringAttribute("cn"));
        assertEquals("Person2", adapter.getStringAttribute("sn"));
        assertEquals("Sweden, Company1, Some Person2", adapter.getStringAttribute("description"));
    }

    public void testLookup_AttributesMapper() {
        Person mapped = (Person) tested.lookup(PERSON2_DN, new PersonAttributesMapper());

        assertEquals("Some Person2", mapped.getFullname());
        assertEquals("Person2", mapped.getLastname());
        assertEquals("Sweden, Company1, Some Person2", mapped.getDescription());
    }

    public void testLookup_AttributesMapper_DistinguishedName() {
        Person mapped = (Person) tested.lookup(new DistinguishedName(PERSON2_DN),
                new PersonAttributesMapper());

        assertEquals("Some Person2", mapped.getFullname());
        assertEquals("Person2", mapped.getLastname());
        assertEquals("Sweden, Company1, Some Person2", mapped.getDescription());
    }

    /**
     * An {@link AttributesMapper} that only maps a subset of the full
     * attributes list. Used in tests where the return attributes list has been
     * limited; verifies as a side effect that the excluded attributes really
     * are absent.
     *
     * @author Ulrik Sandberg
     */
    private final class SubsetPersonAttributesMapper implements AttributesMapper {
        /**
         * Maps the <code>cn</code> attribute into a {@link Person} object and
         * asserts that <code>sn</code> and <code>description</code> were not returned.
         *
         * @see org.springframework.ldap.core.AttributesMapper#mapFromAttributes(javax.naming.directory.Attributes)
         */
        public Object mapFromAttributes(Attributes attributes) throws NamingException {
            Person mapped = new Person();
            mapped.setFullname((String) attributes.get("cn").get());
            assertNull("sn should be null", attributes.get("sn"));
            assertNull("description should be null", attributes.get("description"));
            return mapped;
        }
    }

    /**
     * Verifies that only the subset is used when specifying a subset of the
     * available attributes as return attributes.
     */
    public void testLookup_ReturnAttributes_AttributesMapper() {
        Person mapped = (Person) tested.lookup(PERSON2_DN, new String[] { "cn" },
                new SubsetPersonAttributesMapper());

        assertEquals("Some Person2", mapped.getFullname());
        assertNull("lastName should not be set", mapped.getLastname());
        assertNull("description should not be set", mapped.getDescription());
    }

    /**
     * Verifies that only the subset is used when specifying a subset of the
     * available attributes as return attributes. Uses DistinguishedName instead
     * of plain string as name.
     */
    public void testLookup_ReturnAttributes_AttributesMapper_DistinguishedName() {
        Person mapped = (Person) tested.lookup(new DistinguishedName(PERSON2_DN),
                new String[] { "cn" }, new SubsetPersonAttributesMapper());

        assertEquals("Some Person2", mapped.getFullname());
        assertNull("lastName should not be set", mapped.getLastname());
        assertNull("description should not be set", mapped.getDescription());
    }

    /**
     * This method depends on a DirObjectFactory ({@link org.springframework.ldap.core.support.DefaultDirObjectFactory})
     * being set in the ContextSource.
     */
    public void testLookup_ContextMapper() {
        ContextMapper mapper = new PersonContextMapper();
        Person mapped = (Person) tested.lookup(PERSON2_DN, mapper);

        assertEquals("Some Person2", mapped.getFullname());
        assertEquals("Person2", mapped.getLastname());
        assertEquals("Sweden, Company1, Some Person2", mapped.getDescription());
    }

    /**
     * Verifies that only the subset is used when specifying a subset of the
     * available attributes as return attributes.
     */
    public void testLookup_ReturnAttributes_ContextMapper() {
        ContextMapper mapper = new PersonContextMapper();
        Person mapped = (Person) tested.lookup(PERSON2_DN, new String[] { "cn" }, mapper);

        assertEquals("Some Person2", mapped.getFullname());
        assertNull("lastName should not be set", mapped.getLastname());
        assertNull("description should not be set", mapped.getDescription());
    }

    /**
     * Verifies that we can lookup an entry that has a multi-valued rdn, which
     * means more than one attribute is part of the relative DN for the entry.
     */
    public void testLookup_MultiValuedRdn() {
        Person mapped = (Person) tested.lookup(MULTI_RDN_DN, new PersonAttributesMapper());

        assertEquals("Some Person", mapped.getFullname());
        assertEquals("Person", mapped.getLastname());
        assertEquals("Norway, Company1, Some Person+Person", mapped.getDescription());
    }

    /**
     * Verifies that we can lookup an entry that has a multi-valued rdn, which
     * means more than one attribute is part of the relative DN for the entry.
     */
    public void testLookup_MultiValuedRdn_DirContextAdapter() {
        DirContextAdapter adapter = (DirContextAdapter) tested.lookup(MULTI_RDN_DN);

        assertEquals("Some Person", adapter.getStringAttribute("cn"));
        assertEquals("Person", adapter.getStringAttribute("sn"));
        assertEquals("Norway, Company1, Some Person+Person", adapter.getStringAttribute("description"));
    }

    public void testLookup_GetNameInNamespace_Plain() {
        DirContextAdapter adapter = (DirContextAdapter) tested.lookup(PERSON2_DN);

        assertEquals("cn=Some Person2, ou=company1, c=Sweden", adapter.getDn().toString());
        assertEquals("cn=Some Person2, ou=company1, c=Sweden, dc=jayway, dc=se",
                adapter.getNameInNamespace());
    }

    public void testLookup_GetNameInNamespace_MultiRdn() {
        DirContextAdapter adapter = (DirContextAdapter) tested.lookup(MULTI_RDN_DN);

        assertEquals("cn=Some Person+sn=Person, ou=company1, c=Norway", adapter.getDn().toString());
        assertEquals("cn=Some Person+sn=Person, ou=company1, c=Norway, dc=jayway, dc=se",
                adapter.getNameInNamespace());
    }

    public void setTested(LdapTemplate tested) {
        this.tested = tested;
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.planner.assertions; import com.facebook.presto.Session; import com.facebook.presto.cost.StatsProvider; import com.facebook.presto.metadata.Metadata; import com.facebook.presto.spi.block.SortOrder; import com.facebook.presto.spi.function.FunctionHandle; import com.facebook.presto.spi.plan.PlanNode; import com.facebook.presto.spi.relation.VariableReferenceExpression; import com.facebook.presto.sql.planner.Symbol; import com.facebook.presto.sql.planner.plan.WindowNode; import com.facebook.presto.sql.tree.FunctionCall; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import static com.facebook.presto.sql.planner.assertions.MatchResult.NO_MATCH; import static com.facebook.presto.sql.planner.assertions.MatchResult.match; import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.node; import static com.facebook.presto.sql.planner.assertions.SpecificationProvider.matchSpecification; import static com.google.common.base.MoreObjects.toStringHelper; import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.ImmutableSet.toImmutableSet; import static java.util.Objects.requireNonNull; /** * Optionally validates each of the non-function fields of the node. 
 */
public final class WindowMatcher
        implements Matcher
{
    // Each field is Optional: empty means "no constraint on this aspect of the node".
    private final Optional<Set<SymbolAlias>> prePartitionedInputs;
    private final Optional<ExpectedValueProvider<WindowNode.Specification>> specification;
    private final Optional<Integer> preSortedOrderPrefix;
    // Outer Optional: constrain at all? Inner Optional: expected hash symbol (may be
    // explicitly "absent", i.e. require the node to have no hash variable).
    private final Optional<Optional<SymbolAlias>> hashSymbol;

    private WindowMatcher(
            Optional<Set<SymbolAlias>> prePartitionedInputs,
            Optional<ExpectedValueProvider<WindowNode.Specification>> specification,
            Optional<Integer> preSortedOrderPrefix,
            Optional<Optional<SymbolAlias>> hashSymbol)
    {
        this.prePartitionedInputs = requireNonNull(prePartitionedInputs, "prePartitionedInputs is null");
        this.specification = requireNonNull(specification, "specification is null");
        this.preSortedOrderPrefix = requireNonNull(preSortedOrderPrefix, "preSortedOrderPrefix is null");
        this.hashSymbol = requireNonNull(hashSymbol, "hashSymbol is null");
    }

    @Override
    public boolean shapeMatches(PlanNode node)
    {
        // Shape check only: the node must be a WindowNode; details are checked later.
        return node instanceof WindowNode;
    }

    /**
     * Checks each configured constraint in turn; an empty Optional trivially
     * matches (orElse(true)). Returns NO_MATCH on the first failing constraint.
     */
    @Override
    public MatchResult detailMatches(PlanNode node, StatsProvider stats, Session session, Metadata metadata, SymbolAliases symbolAliases)
    {
        checkState(shapeMatches(node), "Plan testing framework error: shapeMatches returned false in detailMatches in %s", this.getClass().getName());

        WindowNode windowNode = (WindowNode) node;

        // Resolve the expected aliases to symbols and compare as sets (order-insensitive).
        if (!prePartitionedInputs
                .map(expectedInputs -> expectedInputs.stream()
                        .map(alias -> alias.toSymbol(symbolAliases))
                        .collect(toImmutableSet())
                        .equals(windowNode.getPrePartitionedInputs().stream().map(VariableReferenceExpression::getName).map(Symbol::new).collect(toImmutableSet())))
                .orElse(true)) {
            return NO_MATCH;
        }

        if (!specification
                .map(expectedSpecification ->
                        matchSpecification(windowNode.getSpecification(), expectedSpecification.getExpectedValue(symbolAliases)))
                .orElse(true)) {
            return NO_MATCH;
        }

        if (!preSortedOrderPrefix
                .map(Integer.valueOf(windowNode.getPreSortedOrderPrefix())::equals)
                .orElse(true)) {
            return NO_MATCH;
        }

        // Both sides are compared as Optional<String>, so "expected absent" only
        // matches a node with no hash variable.
        if (!hashSymbol
                .map(expectedHashSymbol -> expectedHashSymbol
                        .map(alias -> alias.toSymbol(symbolAliases))
                        .map(Symbol::getName)
                        .equals(windowNode.getHashVariable().map(VariableReferenceExpression::getName)))
                .orElse(true)) {
            return NO_MATCH;
        }

        /*
         * Window functions produce a symbol (the result of the function call) that we might
         * want to bind to an alias so we can reference it further up the tree. As such,
         * they need to be matched with an Alias matcher so we can bind the symbol if desired.
         */
        return match();
    }

    @Override
    public String toString()
    {
        // Only include fields in the description if they are actual constraints.
        return toStringHelper(this)
                .omitNullValues()
                .add("prePartitionedInputs", prePartitionedInputs.orElse(null))
                .add("specification", specification.orElse(null))
                .add("preSortedOrderPrefix", preSortedOrderPrefix.orElse(null))
                .add("hashSymbol", hashSymbol.orElse(null))
                .toString();
    }

    /**
     * By default, matches any WindowNode. Users add additional constraints by
     * calling the various member functions of the Builder, typically named according
     * to the field names of WindowNode.
     */
    public static class Builder
    {
        private final PlanMatchPattern source;
        private Optional<Set<SymbolAlias>> prePartitionedInputs = Optional.empty();
        private Optional<ExpectedValueProvider<WindowNode.Specification>> specification = Optional.empty();
        private Optional<Integer> preSortedOrderPrefix = Optional.empty();
        // Per-function matchers are attached to the pattern in build(), not to the
        // WindowMatcher itself, so each function's output can be alias-bound.
        private List<AliasMatcher> windowFunctionMatchers = new LinkedList<>();
        private Optional<Optional<SymbolAlias>> hashSymbol = Optional.empty();

        Builder(PlanMatchPattern source)
        {
            this.source = requireNonNull(source, "source is null");
        }

        public Builder prePartitionedInputs(Set<String> prePartitionedInputs)
        {
            requireNonNull(prePartitionedInputs, "prePartitionedInputs is null");
            this.prePartitionedInputs = Optional.of(
                    prePartitionedInputs.stream()
                            .map(SymbolAlias::new)
                            .collect(toImmutableSet()));
            return this;
        }

        // Convenience overload: build the expected specification from raw lists/maps.
        public Builder specification(
                List<String> partitionBy,
                List<String> orderBy,
                Map<String, SortOrder> orderings)
        {
            return specification(PlanMatchPattern.specification(partitionBy, orderBy, orderings));
        }

        public Builder specification(ExpectedValueProvider<WindowNode.Specification> specification)
        {
            requireNonNull(specification, "specification is null");
            this.specification = Optional.of(specification);
            return this;
        }

        public Builder preSortedOrderPrefix(int preSortedOrderPrefix)
        {
            this.preSortedOrderPrefix = Optional.of(preSortedOrderPrefix);
            return this;
        }

        // Adds a window-function matcher whose output is bound to outputAlias.
        public Builder addFunction(String outputAlias, ExpectedValueProvider<FunctionCall> functionCall)
        {
            return addFunction(Optional.of(outputAlias), functionCall);
        }

        // Adds a window-function matcher without binding its output to an alias.
        public Builder addFunction(ExpectedValueProvider<FunctionCall> functionCall)
        {
            return addFunction(Optional.empty(), functionCall);
        }

        private Builder addFunction(Optional<String> outputAlias, ExpectedValueProvider<FunctionCall> functionCall)
        {
            windowFunctionMatchers.add(new AliasMatcher(outputAlias, new WindowFunctionMatcher(functionCall, Optional.empty(), Optional.empty())));
            return this;
        }

        // Full-detail variant: additionally constrains the function handle and frame.
        public Builder addFunction(
                String outputAlias,
                ExpectedValueProvider<FunctionCall> functionCall,
                FunctionHandle functionHandle,
                ExpectedValueProvider<WindowNode.Frame> frame)
        {
            windowFunctionMatchers.add(
                    new AliasMatcher(
                            Optional.of(outputAlias),
                            new WindowFunctionMatcher(functionCall, Optional.of(functionHandle), Optional.of(frame))));
            return this;
        }

        /**
         * Matches only if WindowNode.getHashVariable() is a non-empty option containing hashVariable.
         */
        public Builder hashSymbol(String hashSymbol)
        {
            requireNonNull(hashSymbol, "hashSymbol is null");
            this.hashSymbol = Optional.of(Optional.of(new SymbolAlias(hashSymbol)));
            return this;
        }

        PlanMatchPattern build()
        {
            PlanMatchPattern result = node(WindowNode.class, source).with(
                    new WindowMatcher(
                            prePartitionedInputs,
                            specification,
                            preSortedOrderPrefix,
                            hashSymbol));
            windowFunctionMatchers.forEach(result::with);
            return result;
        }
    }
}
/*
 * Copyright (c) 2010 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package com.google.api.client.http.javanet;

import com.google.api.client.http.LowLevelHttpResponse;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Low-level HTTP response backed by a {@link HttpURLConnection}. Status, reason phrase and
 * headers are captured eagerly in the constructor; the content stream is wrapped so that a
 * premature connection close can be detected against the declared Content-Length.
 */
final class NetHttpResponse extends LowLevelHttpResponse {

  private final HttpURLConnection connection;
  private final int responseCode;
  private final String responseMessage;
  private final ArrayList<String> headerNames = new ArrayList<String>();
  private final ArrayList<String> headerValues = new ArrayList<String>();

  NetHttpResponse(HttpURLConnection connection) throws IOException {
    this.connection = connection;
    // HttpURLConnection reports -1 when no valid status code could be parsed; normalize to 0.
    int code = connection.getResponseCode();
    this.responseCode = code == -1 ? 0 : code;
    this.responseMessage = connection.getResponseMessage();
    // Flatten the header multimap into parallel name/value lists, skipping the null key
    // (the status line) and any null values.
    for (Map.Entry<String, List<String>> field : connection.getHeaderFields().entrySet()) {
      String name = field.getKey();
      if (name == null) {
        continue;
      }
      for (String value : field.getValue()) {
        if (value != null) {
          headerNames.add(name);
          headerValues.add(value);
        }
      }
    }
  }

  @Override
  public int getStatusCode() {
    return responseCode;
  }

  /**
   * {@inheritDoc}
   *
   * <p>Returns {@link HttpURLConnection#getInputStream} when it doesn't throw {@link IOException},
   * otherwise it returns {@link HttpURLConnection#getErrorStream}.
   *
   * <p>Upgrade warning: in prior version 1.16 {@link #getContent()} returned {@link
   * HttpURLConnection#getInputStream} only when the status code was successful.
   *
   * <p>Upgrade warning: since version 1.20 the bytes read off the wire are checked and an {@link
   * IOException} is thrown if the response is not fully delivered when the connection is closed by
   * the server for whatever reason, e.g., server restarts. This is a best-effort check: when the
   * response is chunk encoded, we have to rely on the underlying HTTP library to behave correctly.
   */
  @Override
  public InputStream getContent() throws IOException {
    InputStream stream;
    try {
      stream = connection.getInputStream();
    } catch (IOException ioe) {
      // A non-2xx status makes getInputStream throw; the body (if any) is on the error stream.
      stream = connection.getErrorStream();
    }
    if (stream == null) {
      return null;
    }
    return new SizeValidatingInputStream(stream);
  }

  @Override
  public String getContentEncoding() {
    return connection.getContentEncoding();
  }

  @Override
  public long getContentLength() {
    // -1 signals "unknown length" (e.g. chunked responses).
    String length = connection.getHeaderField("Content-Length");
    if (length == null) {
      return -1;
    }
    return Long.parseLong(length);
  }

  @Override
  public String getContentType() {
    return connection.getHeaderField("Content-Type");
  }

  @Override
  public String getReasonPhrase() {
    return responseMessage;
  }

  @Override
  public String getStatusLine() {
    // Header field 0 is the status line itself; only return it if it looks like one.
    String statusLine = connection.getHeaderField(0);
    if (statusLine == null || !statusLine.startsWith("HTTP/1.")) {
      return null;
    }
    return statusLine;
  }

  @Override
  public int getHeaderCount() {
    return headerNames.size();
  }

  @Override
  public String getHeaderName(int index) {
    return headerNames.get(index);
  }

  @Override
  public String getHeaderValue(int index) {
    return headerValues.get(index);
  }

  /**
   * Closes the connection to the HTTP server.
   *
   * @since 1.4
   */
  @Override
  public void disconnect() {
    connection.disconnect();
  }

  /**
   * Wrapper around the underlying {@link InputStream} that treats an EOF before Content-Length
   * bytes have been consumed as a truncated response and raises {@link IOException}.
   *
   * @since 1.20
   */
  private final class SizeValidatingInputStream extends FilterInputStream {

    // Number of body bytes consumed so far (reads + skips).
    private long totalBytesRead = 0;

    public SizeValidatingInputStream(InputStream in) {
      super(in);
    }

    /**
     * java.io.InputStream#read(byte[], int, int) swallows IOException thrown from read() so we have
     * to override it.
     *
     * @see
     * "http://grepcode.com/file/repository.grepcode.com/java/root/jdk/openjdk/8-b132/java/io/InputStream.java#185"
     */
    @Override
    public int read(byte[] b, int off, int len) throws IOException {
      int count = in.read(b, off, len);
      if (count == -1) {
        throwIfFalseEOF();
        return -1;
      }
      totalBytesRead += count;
      return count;
    }

    @Override
    public int read() throws IOException {
      int c = in.read();
      if (c == -1) {
        throwIfFalseEOF();
        return -1;
      }
      totalBytesRead++;
      return c;
    }

    @Override
    public long skip(long len) throws IOException {
      long skipped = in.skip(len);
      totalBytesRead += skipped;
      return skipped;
    }

    // Throws an IOException if we hit EOF in the middle of a response.
    private void throwIfFalseEOF() throws IOException {
      long contentLength = getContentLength();
      // Unknown length: nothing to validate against.
      // According to RFC2616, message-body is prohibited in responses to certain requests (e.g.
      // HEAD), yet an entity-header with non-zero Content-Length may be present; hence the
      // zero-bytes-read case is excluded. See
      // http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.4 for details.
      if (contentLength == -1 || totalBytesRead == 0) {
        return;
      }
      if (totalBytesRead < contentLength) {
        throw new IOException(
            "Connection closed prematurely: bytesRead = "
                + totalBytesRead
                + ", Content-Length = "
                + contentLength);
      }
    }
  }
}
package epsylon.evaluate; import java.util.Iterator; import java.util.Set; import epsylon.ast.AbsOpExp; import epsylon.ast.AddExp; import epsylon.ast.AllSetExp; import epsylon.ast.AndExp; import epsylon.ast.BackslashExp; import epsylon.ast.BoolLit; import epsylon.ast.ContainsExp; import epsylon.ast.DivExp; import epsylon.ast.EqExp; import epsylon.ast.ExistsExp; import epsylon.ast.ForallExp; import epsylon.ast.ForallSetExp; import epsylon.ast.GeqExp; import epsylon.ast.GthExp; import epsylon.ast.HashExp; import epsylon.ast.Ident; import epsylon.ast.ImpliesExp; import epsylon.ast.IntervalExp; import epsylon.ast.LeqExp; import epsylon.ast.LthExp; import epsylon.ast.ModExp; import epsylon.ast.MulExp; import epsylon.ast.NegExp; import epsylon.ast.NeqExp; import epsylon.ast.Node; import epsylon.ast.NotExp; import epsylon.ast.NumLit; import epsylon.ast.OrExp; import epsylon.ast.SetExp; import epsylon.ast.StrictAndExp; import epsylon.ast.StrictOrExp; import epsylon.ast.SubExp; import epsylon.environment.DynamicEnvironment; import epsylon.exception.EvaluationException; import epsylon.interfaces.Exp; import epsylon.interfaces.Value; import epsylon.interfaces.Visitor; /** * The Evaluator computes the final result of an Abstract Syntax Tree, by calculating the result of each expression. * The AST must have been built with a Parser object. Typechecking not required. * * The evaluator might fail if dynamic types are not correct or if a mathematical operation fails. * The evaluator will throw an EvaluationException, if one of these situation happens. * * The Evaluator uses the default constructor. To start evaluating, call evaluate(Node), passing a valid AST. * * @author Alessio Moiso * @version 1.0 */ public class Evaluator implements Visitor<Value> { DynamicEnvironment environment; public Evaluator() { environment = new DynamicEnvironment(); } /** * Starts evaluating the given AST. * * @param tree A valid AST. * @return The final result of the evaluation. 
*/ public Value evaluate(Node tree) { return tree.accept(this); } // Compares two values (Int or Set only) private Boolean compare(Value e1, Value e2) { if (e1 instanceof IntValue && e2 instanceof IntValue) { return ((IntValue) e1).getValue().intValue() <= ((IntValue) e2).getValue().intValue(); } else if (e1 instanceof SetValue && e2 instanceof SetValue) { Set<Value> hashSet1 = (((SetValue) e1).getValue()); Set<Value> hashSet2 = (((SetValue) e2).getValue()); for (Value item : hashSet1) { if (!hashSet2.contains(item)) { return false; } } } throw new EvaluationException("Compare operation undefined between " + e1.toString() + " and " + e2.toString()); } // Returns the value of the mathematical addition. @Override public Value visit(AddExp exp) { Iterator<Exp> children = exp.getChildren(); Value val = children.next().accept(this); Value val2 = children.next().accept(this); if (!(val instanceof IntValue) || !(val2 instanceof IntValue)) { throw new EvaluationException(exp, val, val2); } return new IntValue(((IntValue) val).getValue().intValue() + ((IntValue) val2).getValue().intValue()); } // Returns the result of the All expression. @Override public Value visit(AllSetExp exp) { environment.pushNewLevel(); Iterator<Exp> children = exp.getChildren(); Ident ident = (Ident) children.next(); environment.checkAndPut(ident, null); Value set = children.next().accept(this); AbsOpExp formula = (AbsOpExp) children.next(); if (!(set instanceof SetValue)) { throw new EvaluationException(exp, set); } Set<Value> hashSet = ((SetValue) set).getValue(); SetValue builtSet = new SetValue(); for (Value value : hashSet) { environment.put(ident, value); if (((BoolValue)formula.accept(this)).getValue()) { builtSet.add(value); } } environment.popCurrentLevel(); return builtSet; } // Returns the result of the And expression. 
@Override
    public Value visit(AndExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value val = children.next().accept(this);
        Value val2 = children.next().accept(this);
        if (!(val instanceof BoolValue) || !(val2 instanceof BoolValue)) {
            throw new EvaluationException(exp, val, val2);
        }
        // NOTE: both operands are evaluated before the && is applied, so this
        // And is not short-circuiting with respect to sub-expression effects.
        return new BoolValue(((BoolValue) val).getValue().booleanValue() && ((BoolValue) val2).getValue().booleanValue());
    }

    // Returns the resulting set from a set difference (e1 \ e2).
    @Override
    public Value visit(BackslashExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value e1 = children.next().accept(this);
        Value e2 = children.next().accept(this);
        if (!(e1 instanceof SetValue) || !(e2 instanceof SetValue)) {
            throw new EvaluationException(exp, e1, e2);
        }
        Set<Value> hashSet = ((SetValue) e1).getValue();
        Set<Value> hashSet2 = ((SetValue) e2).getValue();
        // BUG FIX: the original called hashSet.removeAll(hashSet2), mutating the
        // underlying set of the evaluated e1 operand (which may be shared, e.g.
        // bound in the environment). Build the difference without mutation.
        SetValue builtSet = new SetValue();
        for (Value item : hashSet) {
            if (!hashSet2.contains(item)) {
                builtSet.add(item);
            }
        }
        return builtSet;
    }

    // Returns the value of a Bool literal.
    @Override
    public Value visit(BoolLit exp) {
        return new BoolValue(exp.getValue());
    }

    // Returns the result of a Contains expression: whether the evaluated set
    // holds an element equal to the second operand.
    @Override
    public Value visit(ContainsExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value val = children.next().accept(this);
        Value val2 = children.next().accept(this);
        if (!(val instanceof SetValue)) {
            throw new EvaluationException(exp, val, val2);
        }
        Set<Value> items = ((SetValue) val).getValue();
        for (Value item : items) {
            if (item.equals(val2)) {
                return new BoolValue(true);
            }
        }
        return new BoolValue(false);
    }

    // Returns the result of a mathematical division. Throws an exception if the divisor is 0.
@Override
    public Value visit(DivExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value val = children.next().accept(this);
        Value val2 = children.next().accept(this);
        if (!(val instanceof IntValue) || !(val2 instanceof IntValue)) {
            throw new EvaluationException(exp, val, val2);
        }
        // Guard against division by zero before performing the integer division.
        if (((IntValue) val2).getValue() == 0) {
            throw new EvaluationException("Math division by zero is undefined.");
        }
        return new IntValue(((IntValue) val).getValue().intValue() / ((IntValue) val2).getValue().intValue());
    }

    // Returns whether two values are the same (structural equality, any type).
    @Override
    public Value visit(EqExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value val = children.next().accept(this);
        Value val2 = children.next().accept(this);
        return new BoolValue(val.equals(val2));
    }

    // Returns the result of an Exists expression: true as soon as one element
    // of the set satisfies the condition, false otherwise.
    @Override
    public Value visit(ExistsExp exp) {
        environment.pushNewLevel();
        Iterator<Exp> children = exp.getChildren();
        Ident ident = (Ident) children.next();
        environment.checkAndPut(ident, null);
        Value set = children.next().accept(this);
        // BUG FIX: the original cast to SetValue unchecked, producing a
        // ClassCastException instead of the EvaluationException thrown by the
        // sibling quantifier visitors (AllSetExp/ForallSetExp).
        if (!(set instanceof SetValue)) {
            throw new EvaluationException(exp, set);
        }
        Set<Value> hashSet = ((SetValue) set).getValue();
        AbsOpExp condition = (AbsOpExp) children.next();
        for (Value value : hashSet) {
            environment.put(ident, value);
            Value bool = condition.accept(this);
            if (((BoolValue) bool).getValue()) {
                environment.popCurrentLevel();
                // Deprecated new Boolean(...) replaced by autoboxed literal.
                return new BoolValue(true);
            }
        }
        environment.popCurrentLevel();
        return new BoolValue(false);
    }

    // Returns the value of a Forall expression.
@Override
    public Value visit(ForallExp exp) {
        environment.pushNewLevel();
        Iterator<Exp> children = exp.getChildren();
        Ident ident = (Ident) children.next();
        environment.checkAndPut(ident, null);
        Value set = children.next().accept(this);
        // BUG FIX: check the dynamic type instead of casting unchecked, for
        // consistency with AllSetExp/ForallSetExp error handling.
        if (!(set instanceof SetValue)) {
            throw new EvaluationException(exp, set);
        }
        Set<Value> hashSet = ((SetValue) set).getValue();
        AbsOpExp formula = (AbsOpExp) children.next();
        for (Value value : hashSet) {
            environment.put(ident, value);
            Value bool = formula.accept(this);
            if (!((BoolValue) bool).getValue()) {
                environment.popCurrentLevel();
                return new BoolValue(false);
            }
        }
        environment.popCurrentLevel();
        return new BoolValue(true);
    }

    // Returns the value of a ForallSet expression: the image set
    // { formula(x) | x in set }, binding the identifier to each element.
    @Override
    public Value visit(ForallSetExp exp) {
        environment.pushNewLevel();
        Iterator<Exp> children = exp.getChildren();
        Ident ident = (Ident) children.next();
        environment.checkAndPut(ident, null);
        Value set = children.next().accept(this);
        AbsOpExp formula = (AbsOpExp) children.next();
        if (!(set instanceof SetValue)) {
            throw new EvaluationException(exp, set);
        }
        Set<Value> hashSet = ((SetValue) set).getValue();
        SetValue builtSet = new SetValue();
        for (Value value : hashSet) {
            environment.put(ident, value);
            builtSet.add(formula.accept(this));
        }
        environment.popCurrentLevel();
        return builtSet;
    }

    // Returns the result of a Greater Than or Equal to expression.
    // BUG FIX: the Geq and Gth bodies (and their comments) were swapped in the
    // original — Geq returned false for equal values and Gth returned true.
    @Override
    public Value visit(GeqExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value e1 = children.next().accept(this);
        Value e2 = children.next().accept(this);
        // e1 >= e2  <=>  e2 <= e1 (compare is "less than or equal").
        return new BoolValue(compare(e2, e1));
    }

    // Returns the result of a (strict) Greater Than expression.
    @Override
    public Value visit(GthExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value e1 = children.next().accept(this);
        Value e2 = children.next().accept(this);
        if (!e1.equals(e2)) {
            // not equal and e2 <= e1  =>  e1 > e2.
            return new BoolValue(compare(e2, e1));
        }
        return new BoolValue(false);
    }

    // Returns the cardinality of a set.
@Override
    public Value visit(HashExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value child = children.next().accept(this);
        if (!(child instanceof SetValue)) {
            throw new EvaluationException(exp, child);
        }
        return new IntValue(((SetValue) child).getValue().size());
    }

    // Returns the value currently bound to an identifier in the environment.
    @Override
    public Value visit(Ident exp) {
        return environment.lookupAndCheck(exp);
    }

    // Returns the value of an Implies expression (e1 -> e2).
    @Override
    public Value visit(ImpliesExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value e1 = children.next().accept(this);
        Value e2 = children.next().accept(this);
        // BUG FIX: the type check used && (only threw when BOTH operands were
        // non-boolean); it must reject when EITHER operand is non-boolean.
        if (!(e1 instanceof BoolValue) || !(e2 instanceof BoolValue)) {
            throw new EvaluationException(exp, e1, e2);
        }
        // BUG FIX: the original read e1 on both sides (!e1 || e1), which is a
        // tautology — the implication always evaluated to true. Material
        // implication is !e1 || e2.
        return new BoolValue(!((BoolValue) e1).getValue().booleanValue() || ((BoolValue) e2).getValue().booleanValue());
    }

    // Returns the set resulting from an Interval: all integers from start to end, inclusive.
    @Override
    public Value visit(IntervalExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value start = children.next().accept(this);
        Value end = children.next().accept(this);
        // BUG FIX: && changed to || — reject if either bound is not an Int
        // (the original only threw when both were non-Int, then failed on the cast).
        if (!(start instanceof IntValue) || !(end instanceof IntValue)) {
            throw new EvaluationException(exp, start, end);
        }
        int startInt = ((IntValue) start).getValue();
        int endInt = ((IntValue) end).getValue();
        SetValue set = new SetValue();
        // An empty interval (start > end) yields the empty set.
        for (; startInt <= endInt; startInt++) {
            set.add(new IntValue(startInt));
        }
        return set;
    }

    // Returns the result of a Less Than or Equal to expression.
    @Override
    public Value visit(LeqExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value e1 = children.next().accept(this);
        Value e2 = children.next().accept(this);
        return new BoolValue(compare(e1, e2));
    }

    // Returns the result of a (strict) Less Than expression.
@Override
    public Value visit(LthExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value e1 = children.next().accept(this);
        Value e2 = children.next().accept(this);
        if (!e1.equals(e2)) {
            // not equal and e1 <= e2  =>  e1 < e2.
            return new BoolValue(compare(e1, e2));
        }
        // Deprecated new Boolean(...) replaced by autoboxed literal.
        return new BoolValue(false);
    }

    // Returns the result of a mathematical modulo. Throws if the divisor is 0.
    @Override
    public Value visit(ModExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value val = children.next().accept(this);
        Value val2 = children.next().accept(this);
        if (!(val instanceof IntValue) || !(val2 instanceof IntValue)) {
            throw new EvaluationException(exp, val, val2);
        }
        if (((IntValue) val2).getValue() == 0) {
            // BUG FIX: the message said "division"; this is the modulo operator.
            throw new EvaluationException("Mathematical modulo by zero is undefined.");
        }
        return new IntValue(((IntValue) val).getValue().intValue() % ((IntValue) val2).getValue().intValue());
    }

    // Returns the result of a mathematical multiplication.
    @Override
    public Value visit(MulExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value val = children.next().accept(this);
        Value val2 = children.next().accept(this);
        if (!(val instanceof IntValue) || !(val2 instanceof IntValue)) {
            throw new EvaluationException(exp, val, val2);
        }
        return new IntValue(((IntValue) val).getValue().intValue() * ((IntValue) val2).getValue().intValue());
    }

    // Returns the arithmetic negation of an Int.
    @Override
    public Value visit(NegExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value val = children.next().accept(this);
        if (!(val instanceof IntValue)) {
            throw new EvaluationException(exp, val);
        }
        return new IntValue(-((IntValue) val).getValue().intValue());
    }

    // Returns whether two values are not the same (any type).
    @Override
    public Value visit(NeqExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value val = children.next().accept(this);
        Value val2 = children.next().accept(this);
        boolean equals = (val.equals(val2));
        return new BoolValue(!equals);
    }

    // Returns the opposite value of a Bool.
@Override
    public Value visit(NotExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value val = children.next().accept(this);
        if (!(val instanceof BoolValue)) {
            throw new EvaluationException(exp, val);
        }
        return new BoolValue(!((BoolValue) val).getValue().booleanValue());
    }

    // Returns the value of an Int literal.
    @Override
    public Value visit(NumLit exp) {
        return new IntValue(exp.getValue());
    }

    // Returns the result of an Or expression.
    @Override
    public Value visit(OrExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value val = children.next().accept(this);
        Value val2 = children.next().accept(this);
        if (!(val instanceof BoolValue) || !(val2 instanceof BoolValue)) {
            throw new EvaluationException(exp, val, val2);
        }
        // NOTE: both operands are evaluated first, so this Or is not short-circuiting.
        return new BoolValue(((BoolValue) val).getValue().booleanValue() || ((BoolValue) val2).getValue().booleanValue());
    }

    // Returns the Set built from the elements of a Set expression.
    @Override
    public Value visit(SetExp exp) {
        Iterator<Exp> children = exp.getChildren();
        SetValue set = new SetValue();
        while (children.hasNext()) {
            Value value = children.next().accept(this);
            set.add(value);
        }
        return set;
    }

    // Returns the intersection of two sets.
    @Override
    public Value visit(StrictAndExp exp) {
        Iterator<Exp> children = exp.getChildren();
        Value e1 = children.next().accept(this);
        Value e2 = children.next().accept(this);
        if (!(e1 instanceof SetValue) || !(e2 instanceof SetValue)) {
            throw new EvaluationException(exp, e1, e2);
        }
        Set<Value> hashSet = ((SetValue) e1).getValue();
        Set<Value> hashSet2 = ((SetValue) e2).getValue();
        // BUG FIX: the original called hashSet.retainAll(hashSet2), mutating the
        // underlying set of the evaluated e1 operand. Build the intersection
        // without touching either operand.
        SetValue builtSet = new SetValue();
        for (Value item : hashSet) {
            if (hashSet2.contains(item)) {
                builtSet.add(item);
            }
        }
        return builtSet;
    }

    // Returns the union of two sets. Uses the addAll() Java function.
@Override public Value visit(StrictOrExp exp) { Iterator<Exp> children = exp.getChildren(); Value e1 = children.next().accept(this); Value e2 = children.next().accept(this); if (!(e1 instanceof SetValue) || !(e2 instanceof SetValue)) { throw new EvaluationException(exp, e1, e2); } Set<Value> hashSet = ((SetValue) e1).getValue(); Set<Value> hashSet2 = ((SetValue) e2).getValue(); hashSet.addAll(hashSet2); SetValue builtSet = new SetValue(); builtSet.addAll(hashSet); return builtSet; } // Returns the result of a mathematical subtraction. @Override public Value visit(SubExp exp) { Iterator<Exp> children = exp.getChildren(); Value val = children.next().accept(this); Value val2 = children.next().accept(this); if (!(val instanceof IntValue) && !(val2 instanceof IntValue)) { throw new EvaluationException(exp, val, val2); } return new IntValue(((IntValue) val).getValue().intValue() - ((IntValue) val2).getValue().intValue()); } // Starts visiting the source node. @Override public Value visit(Node node) { return node.getExp().accept(this); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.service;

import org.apache.ignite.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.processors.cache.*;
import org.apache.ignite.internal.util.typedef.*;
import org.apache.ignite.internal.util.typedef.internal.*;

import java.util.*;
import java.util.concurrent.*;

/**
 * Tests service reassignment.
 */
public class GridServiceReassignmentSelfTest extends GridServiceProcessorAbstractSelfTest {
    /** {@inheritDoc} */
    // Start with a single node; checkReassigns grows/shrinks the topology itself.
    @Override protected int nodeCount() {
        return 1;
    }

    /**
     * Cluster-singleton deployment: exactly one instance cluster-wide.
     *
     * @throws Exception If failed.
     */
    public void testClusterSingleton() throws Exception {
        checkReassigns(1, 1);
    }

    /**
     * Node-singleton deployment: one instance on every node (total unbounded).
     *
     * @throws Exception If failed.
     */
    public void testNodeSingleton() throws Exception {
        checkReassigns(0, 1);
    }

    /**
     * Limited deployment: at most 5 total, 2 per node.
     *
     * @throws Exception If failed.
     */
    public void testLimited1() throws Exception {
        checkReassigns(5, 2);
    }

    /**
     * Limited deployment: at most 7 total, 3 per node.
     *
     * @throws Exception If failed.
     */
    public void testLimited2() throws Exception {
        checkReassigns(7, 3);
    }

    /**
     * Creates a (non-sticky) client-side proxy for the deployed counter service.
     *
     * @param g Grid to create the proxy on.
     * @return Service proxy.
     * @throws Exception If failed.
     */
    private CounterService proxy(Ignite g) throws Exception {
        return g.services().serviceProxy("testService", CounterService.class, false);
    }

    /**
     * @param total Total number of services.
* @param maxPerNode Maximum number of services per node.
     * @throws IgniteCheckedException If failed.
     */
    // Deploys the service, then repeatedly grows the topology to 5 nodes and
    // shrinks it back to 1, verifying after every change that the service
    // assignments are rebalanced correctly (with a bounded number of retries,
    // since reassignment is asynchronous).
    private void checkReassigns(int total, int maxPerNode) throws Exception {
        CountDownLatch latch = new CountDownLatch(nodeCount());

        DummyService.exeLatch("testService", latch);

        grid(0).services().deployMultiple("testService", new CounterServiceImpl(), total, maxPerNode);

        // Seed the counter so checkServices can assert the expected value (10).
        for (int i = 0; i < 10; i++)
            proxy(randomGrid()).increment();

        Collection<Integer> startedGrids = new HashSet<>();

        try {
            startedGrids.add(0);

            int maxTopSize = 5;

            boolean grow = true;

            Random rnd = new Random();

            for (int i = 0; i < 20; i++) {
                if (grow) {
                    assert startedGrids.size() < maxTopSize;

                    int gridIdx = nextAvailableIdx(startedGrids, maxTopSize, rnd);

                    startGrid(gridIdx);

                    startedGrids.add(gridIdx);

                    // Flip to shrinking once the topology is full.
                    if (startedGrids.size() == maxTopSize)
                        grow = false;
                }
                else {
                    assert startedGrids.size() > 1;

                    int gridIdx = nextRandomIdx(startedGrids, rnd);

                    stopGrid(gridIdx);

                    // gridIdx is autoboxed to Integer for Collection.remove(Object).
                    startedGrids.remove(gridIdx);

                    // Flip back to growing once only one node is left.
                    if (startedGrids.size() == 1)
                        grow = true;
                }

                // Reassignment is async: poll up to 11 times (last attempt asserts hard).
                for (int attempt = 0; attempt <= 10; ++attempt) {
                    U.sleep(500);

                    if (checkServices(total, maxPerNode, F.first(startedGrids), attempt == 10))
                        break;
                }
            }
        }
        finally {
            grid(F.first(startedGrids)).services().cancel("testService");

            stopAllGrids();

            // Restore the single-node topology expected by the abstract parent.
            startGrid(0);
        }
    }

    /**
     * Checks services assignments.
     *
     * @param total Total number of services.
     * @param maxPerNode Maximum number of services per node.
     * @param gridIdx Grid index to check.
     * @param lastTry Last try flag.
     * @throws Exception If failed.
     * @return {@code True} if check passed.
*/
    private boolean checkServices(int total, int maxPerNode, int gridIdx, boolean lastTry) throws Exception {
        IgniteEx grid = grid(gridIdx);

        IgniteInternalCache<GridServiceAssignmentsKey, GridServiceAssignments> cache = grid.utilityCache();

        GridServiceAssignments assignments = cache.get(new GridServiceAssignmentsKey("testService"));

        Collection<UUID> nodes = F.viewReadOnly(grid.cluster().nodes(), F.node2id());

        assertNotNull("Grid assignments object is null", assignments);

        int sum = 0;

        for (Map.Entry<UUID, Integer> entry : assignments.assigns().entrySet()) {
            UUID nodeId = entry.getKey();

            // Before the last attempt, a stale assignment just means "retry";
            // on the last attempt it is a hard failure.
            if (!lastTry && !nodes.contains(nodeId))
                return false;

            assertTrue("Dead node is in assignments: " + nodeId, nodes.contains(nodeId));

            Integer nodeCnt = entry.getValue();

            if (maxPerNode > 0)
                // BUG FIX: message was missing the closing ']'.
                assertTrue("Max per node limit exceeded [nodeId=" + nodeId + ", max=" + maxPerNode +
                    ", actual=" + nodeCnt + ']',
                    nodeCnt <= maxPerNode);

            sum += nodeCnt;
        }

        if (total > 0)
            assertTrue("Total number of services limit exceeded [sum=" + sum +
                ", assigns=" + assignments.assigns() + ']', sum <= total);

        // The counter was incremented 10 times in checkReassigns; retry until
        // the value is visible (hard assert on the last attempt).
        if (!lastTry && proxy(grid).get() != 10)
            return false;

        assertEquals(10, proxy(grid).get());

        return true;
    }

    /**
     * Gets next available index.
     *
     * @param startedGrids Indexes for started grids.
     * @param maxTopSize Max topology size.
     * @param rnd Random numbers generator.
     * @return Next available index.
     */
    private int nextAvailableIdx(Collection<Integer> startedGrids, int maxTopSize, Random rnd) {
        // Rejection sampling: loop until an unused index is drawn.
        while (true) {
            int idx = rnd.nextInt(maxTopSize);

            if (!startedGrids.contains(idx))
                return idx;
        }
    }

    /**
     * @param startedGrids Started grids.
     * @param rnd Random numbers generator.
     * @return Randomly chosen started grid.
     */
    private int nextRandomIdx(Iterable<Integer> startedGrids, Random rnd) {
        // Repeatedly coin-flip over the started grids until one is selected;
        // terminates with probability 1 since startedGrids is non-empty here.
        while (true) {
            for (Integer idx : startedGrids) {
                if (rnd.nextBoolean())
                    return idx;
            }
        }
    }
}
/*
 * Copy from Alibaba Dubbo Framework
 *
 */
package com.alibaba.dubbo.common.utils;

import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Matcher;

/**
 * Reflection facade over a class: exposes its public fields/getters/setters as
 * named properties and its public methods by name, backed by a bytecode-generated
 * subclass (see makeWrapper) rather than per-call reflection.
 */
public abstract class Wrapper
{
    // Suffix counter for generated wrapper class names.
    // NOTE(review): could be final — it is never reassigned.
    private static AtomicLong WRAPPER_CLASS_COUNTER = new AtomicLong(0);

    private static final Map<Class<?>, Wrapper> WRAPPER_MAP = new ConcurrentHashMap<Class<?>, Wrapper>(); //class wrapper map

    private static final String[] EMPTY_STRING_ARRAY = new String[0];

    private static final String[] OBJECT_METHODS = new String[]{"getClass", "hashCode", "toString", "equals"};

    // Hand-written wrapper for java.lang.Object: no properties, only the four
    // universally available methods; avoids generating bytecode for Object.
    private static final Wrapper OBJECT_WRAPPER = new Wrapper(){
        public String[] getMethodNames(){ return OBJECT_METHODS; }
        public String[] getDeclaredMethodNames(){ return OBJECT_METHODS; }
        public String[] getPropertyNames(){ return EMPTY_STRING_ARRAY; }
        public Class<?> getPropertyType(String pn){ return null; }
        public Object getPropertyValue(Object instance, String pn) throws NoSuchPropertyException{ throw new NoSuchPropertyException("Property [" + pn + "] not found."); }
        public void setPropertyValue(Object instance, String pn, Object pv) throws NoSuchPropertyException{ throw new NoSuchPropertyException("Property [" + pn + "] not found."); }
        public boolean hasProperty(String name){ return false; }
        public Object invokeMethod(Object instance, String mn, Class<?>[] types, Object[] args) throws NoSuchMethodException
        {
            if( "getClass".equals(mn) ) return instance.getClass();
            if( "hashCode".equals(mn) ) return instance.hashCode();
            if( "toString".equals(mn) ) return instance.toString();
            if( "equals".equals(mn) )
            {
                if( args.length == 1 ) return instance.equals(args[0]);
                throw new IllegalArgumentException("Invoke method [" + mn + "] argument number error.");
            }
            throw new NoSuchMethodException("Method [" + mn + "] not found.");
        }
    };

    /**
     * get wrapper.
     *
     * @param c Class instance.
     * @return Wrapper instance(not null).
     */
    public static Wrapper getWrapper(Class<?> c)
    {
        while( ClassGenerator.isDynamicClass(c) ) // can not wrapper on dynamic class.
            c = c.getSuperclass();

        if( c == Object.class )
            return OBJECT_WRAPPER;

        // NOTE(review): check-then-act race — two threads may both generate a
        // wrapper for the same class; benign (last put wins, both are usable),
        // but wastes one class generation.
        Wrapper ret = WRAPPER_MAP.get(c);
        if( ret == null )
        {
            ret = makeWrapper(c);
            WRAPPER_MAP.put(c,ret);
        }
        return ret;
    }

    /**
     * get property name array.
     *
     * @return property name array.
     */
    abstract public String[] getPropertyNames();

    /**
     * get property type.
     *
     * @param pn property name.
     * @return Property type or null.
     */
    abstract public Class<?> getPropertyType(String pn);

    /**
     * has property.
     *
     * @param name property name.
     * @return has or has not.
     */
    abstract public boolean hasProperty(String name);

    /**
     * get property value.
     *
     * @param instance instance.
     * @param pn property name.
     * @return value.
     */
    abstract public Object getPropertyValue(Object instance, String pn) throws NoSuchPropertyException, IllegalArgumentException;

    /**
     * set property value.
     *
     * @param instance instance.
     * @param pn property name.
     * @param pv property value.
     */
    abstract public void setPropertyValue(Object instance, String pn, Object pv) throws NoSuchPropertyException, IllegalArgumentException;

    /**
     * get property value.
     *
     * @param instance instance.
     * @param pns property name array.
     * @return value array.
     */
    public Object[] getPropertyValues(Object instance, String[] pns) throws NoSuchPropertyException, IllegalArgumentException
    {
        Object[] ret = new Object[pns.length];
        for(int i=0;i<ret.length;i++)
            ret[i] = getPropertyValue(instance, pns[i]);
        return ret;
    }

    /**
     * set property value.
     *
     * @param instance instance.
     * @param pns property name array.
     * @param pvs property value array.
*/
    public void setPropertyValues(Object instance, String[] pns, Object[] pvs) throws NoSuchPropertyException, IllegalArgumentException
    {
        if( pns.length != pvs.length )
            throw new IllegalArgumentException("pns.length != pvs.length");

        for(int i=0;i<pns.length;i++)
            setPropertyValue(instance, pns[i], pvs[i]);
    }

    /**
     * get method name array.
     *
     * @return method name array.
     */
    abstract public String[] getMethodNames();

    /**
     * get method name array.
     *
     * @return method name array.
     */
    abstract public String[] getDeclaredMethodNames();

    /**
     * has method.
     *
     * @param name method name.
     * @return has or has not.
     */
    public boolean hasMethod(String name)
    {
        for( String mn : getMethodNames() )
            if( mn.equals(name) ) return true;
        return false;
    }

    /**
     * invoke method.
     *
     * @param instance instance.
     * @param mn method name.
     * @param types argument type array.
     * @param args argument array.
     * @return return value.
     */
    abstract public Object invokeMethod(Object instance, String mn, Class<?>[] types, Object[] args) throws NoSuchMethodException, InvocationTargetException;

    /**
     * Generates a Wrapper subclass for the given class: builds Java source for
     * setPropertyValue/getPropertyValue/invokeMethod as dispatch chains over the
     * class's public fields and methods, compiles it via ClassGenerator, and
     * populates the generated class's static metadata fields.
     *
     * In the generated bodies, $1..$4 are the positional parameters and $w is the
     * wrapped (boxed) form of a primitive — ClassGenerator/Javassist syntax.
     */
    private static Wrapper makeWrapper(Class<?> c)
    {
        if( c.isPrimitive() )
            throw new IllegalArgumentException("Can not create wrapper for primitive type: " + c);

        String name = c.getName();
        ClassLoader cl = ClassHelper.getCallerClassLoader(Wrapper.class);

        StringBuilder c1 = new StringBuilder("public void setPropertyValue(Object o, String n, Object v){ ");
        StringBuilder c2 = new StringBuilder("public Object getPropertyValue(Object o, String n){ ");
        StringBuilder c3 = new StringBuilder("public Object invokeMethod(Object o, String n, Class[] p, Object[] v) throws " + InvocationTargetException.class.getName() + "{ ");

        // Each generated method first casts the target instance to the wrapped type.
        c1.append(name).append(" w; try{ w = ((").append(name).append(")$1); }catch(Throwable e){ throw new IllegalArgumentException(e); }");
        c2.append(name).append(" w; try{ w = ((").append(name).append(")$1); }catch(Throwable e){ throw new IllegalArgumentException(e); }");
        c3.append(name).append(" w; try{ w = ((").append(name).append(")$1); }catch(Throwable e){ throw new IllegalArgumentException(e); }");

        Map<String, Class<?>> pts = new HashMap<String, Class<?>>(); // <property name, property types>
        Map<String, Method> ms = new LinkedHashMap<String, Method>(); // <method desc, Method instance>
        List<String> mns = new ArrayList<String>(); // method names.
        List<String> dmns = new ArrayList<String>(); // declaring method names.

        // get all public field.
        for( Field f : c.getFields() )
        {
            String fn = f.getName();
            Class<?> ft = f.getType();
            if( Modifier.isStatic(f.getModifiers()) || Modifier.isTransient(f.getModifiers()) )
                continue;

            c1.append(" if( $2.equals(\"").append(fn).append("\") ){ w.").append(fn).append("=").append(arg(ft, "$3")).append("; return; }");
            c2.append(" if( $2.equals(\"").append(fn).append("\") ){ return ($w)w.").append(fn).append("; }");
            pts.put(fn, ft);
        }

        Method[] methods = c.getMethods();
        // get all public method.
        boolean hasMethod = hasMethods(methods);
        if( hasMethod ){
            c3.append(" try{");
        }
        for( Method m : methods )
        {
            if( m.getDeclaringClass() == Object.class ) //ignore Object's method.
                continue;

            String mn = m.getName();
            c3.append(" if( \"").append(mn).append("\".equals( $2 ) ");

            int len = m.getParameterTypes().length;
            c3.append(" && ").append(" $3.length == ").append(len);

            // Overloaded name: also match on the exact parameter type names.
            boolean override = false;
            for( Method m2 : methods ) {
                if (m != m2 && m.getName().equals(m2.getName())) {
                    override = true;
                    break;
                }
            }
            if (override) {
                if (len > 0) {
                    for (int l = 0; l < len; l ++) {
                        c3.append(" && ").append(" $3[").append(l).append("].getName().equals(\"")
                            .append(m.getParameterTypes()[l].getName()).append("\")");
                    }
                }
            }

            c3.append(" ) { ");

            if( m.getReturnType() == Void.TYPE )
                c3.append(" w.").append(mn).append('(').append(args(m.getParameterTypes(), "$4")).append(");").append(" return null;");
            else
                c3.append(" return ($w)w.").append(mn).append('(').append(args(m.getParameterTypes(), "$4")).append(");");

            c3.append(" }");

            mns.add(mn);
            if( m.getDeclaringClass() == c )
                dmns.add(mn);
            ms.put(ReflectUtils.getDesc(m), m);
        }
        if( hasMethod ){
            c3.append(" } catch(Throwable e) { " );
            c3.append("     throw new java.lang.reflect.InvocationTargetException(e); " );
            c3.append(" }");
        }

        c3.append(" throw new " + NoSuchMethodException.class.getName() + "(\"Not found method \\\"\"+$2+\"\\\" in class " + c.getName() + ".\"); }");

        // deal with get/set method.
        Matcher matcher;
        for( Map.Entry<String,Method> entry : ms.entrySet() )
        {
            String md = entry.getKey();
            Method method = entry.getValue(); // redundant (Method) cast removed.
            if( ( matcher = ReflectUtils.GETTER_METHOD_DESC_PATTERN.matcher(md) ).matches() )
            {
                String pn = propertyName(matcher.group(1));
                c2.append(" if( $2.equals(\"").append(pn).append("\") ){ return ($w)w.").append(method.getName()).append("(); }");
                pts.put(pn, method.getReturnType());
            }
            else if( ( matcher = ReflectUtils.IS_HAS_CAN_METHOD_DESC_PATTERN.matcher(md) ).matches() )
            {
                String pn = propertyName(matcher.group(1));
                c2.append(" if( $2.equals(\"").append(pn).append("\") ){ return ($w)w.").append(method.getName()).append("(); }");
                pts.put(pn, method.getReturnType());
            }
            else if( ( matcher = ReflectUtils.SETTER_METHOD_DESC_PATTERN.matcher(md) ).matches() )
            {
                Class<?> pt = method.getParameterTypes()[0];
                String pn = propertyName(matcher.group(1));
                c1.append(" if( $2.equals(\"").append(pn).append("\") ){ w.").append(method.getName()).append("(").append(arg(pt,"$3")).append("); return; }");
                pts.put(pn, pt);
            }
        }
        // BUG FIX: "filed" typo corrected to "field" in both fall-through
        // messages; the getter-side message also wrongly said "setter".
        c1.append(" throw new " + NoSuchPropertyException.class.getName() + "(\"Not found property \\\"\"+$2+\"\\\" field or setter method in class " + c.getName() + ".\"); }");
        c2.append(" throw new " + NoSuchPropertyException.class.getName() + "(\"Not found property \\\"\"+$2+\"\\\" field or getter method in class " + c.getName() + ".\"); }");

        // make class
        long id = WRAPPER_CLASS_COUNTER.getAndIncrement();
        ClassGenerator cc = ClassGenerator.newInstance(cl);
        cc.setClassName( ( Modifier.isPublic(c.getModifiers()) ? Wrapper.class.getName() : c.getName() + "$sw" ) + id );
        cc.setSuperClass(Wrapper.class);

        cc.addDefaultConstructor();
        cc.addField("public static String[] pns;"); // property name array.
        cc.addField("public static " + Map.class.getName() + " pts;"); // property type map.
        cc.addField("public static String[] mns;"); // all method name array.
        cc.addField("public static String[] dmns;"); // declared method name array.
        for(int i=0,len=ms.size();i<len;i++)
            cc.addField("public static Class[] mts" + i + ";");

        cc.addMethod("public String[] getPropertyNames(){ return pns; }");
        cc.addMethod("public boolean hasProperty(String n){ return pts.containsKey($1); }");
        cc.addMethod("public Class getPropertyType(String n){ return (Class)pts.get($1); }");
        cc.addMethod("public String[] getMethodNames(){ return mns; }");
        cc.addMethod("public String[] getDeclaredMethodNames(){ return dmns; }");
        cc.addMethod(c1.toString());
        cc.addMethod(c2.toString());
        cc.addMethod(c3.toString());

        try
        {
            Class<?> wc = cc.toClass();
            // setup static field.
            wc.getField("pts").set(null, pts);
            wc.getField("pns").set(null, pts.keySet().toArray(new String[0]));
            wc.getField("mns").set(null, mns.toArray(new String[0]));
            wc.getField("dmns").set(null, dmns.toArray(new String[0]));
            int ix = 0;
            for( Method m : ms.values() )
                wc.getField("mts" + ix++).set(null, m.getParameterTypes());
            return (Wrapper)wc.newInstance();
            // (dead commented-out stub Wrapper implementation removed.)
        }
        catch(RuntimeException e)
        {
            throw e;
        }
        catch(Throwable e)
        {
            throw new RuntimeException(e.getMessage(), e);
        }
        finally
        {
            cc.release();
            ms.clear();
            mns.clear();
            dmns.clear();
        }
    }

    /**
     * Emits source that unboxes/casts the generated-code expression {@code name}
     * to the given parameter type (primitives go through their wrapper types).
     */
    private static String arg(Class<?> cl, String name)
    {
        if( cl.isPrimitive() )
        {
            if( cl == Boolean.TYPE )
                return "((Boolean)" + name + ").booleanValue()";
            if( cl == Byte.TYPE )
                return "((Byte)" + name + ").byteValue()";
            if( cl == Character.TYPE )
                return "((Character)" + name + ").charValue()";
            if( cl == Double.TYPE )
                return "((Number)" + name + ").doubleValue()";
            if( cl == Float.TYPE )
                return "((Number)" + name + ").floatValue()";
            if( cl == Integer.TYPE )
                return "((Number)" + name + ").intValue()";
            if( cl == Long.TYPE )
                return "((Number)" + name + ").longValue()";
            if( cl == Short.TYPE )
                return "((Number)" + name + ").shortValue()";
            throw new RuntimeException("Unknown primitive type: " + cl.getName());
        }
        return "(" + ReflectUtils.getName(cl) + ")" + name;
    }

    /**
     * Emits a comma-separated argument list, casting each element of the
     * generated-code array {@code name} to the corresponding parameter type.
     */
    private static String args(Class<?>[] cs,String name)
    {
        int len = cs.length;
        if( len == 0 ) return "";
        StringBuilder sb = new StringBuilder();
        for(int i=0;i<len;i++)
        {
            if( i > 0 )
                sb.append(',');
            sb.append(arg(cs[i],name+"["+i+"]"));
        }
        return sb.toString();
    }

    /**
     * Derives a JavaBean property name from an accessor suffix: lowercases the
     * first character unless the second character is uppercase (e.g. "URL").
     */
    private static String propertyName(String pn)
    {
        return pn.length() == 1 || Character.isLowerCase(pn.charAt(1)) ? Character.toLowerCase(pn.charAt(0)) + pn.substring(1) : pn;
    }

    // Returns true if the class contributes at least one method beyond Object's.
    private static boolean hasMethods(Method[] methods){
        if(methods == null || methods.length == 0){
            return false;
        }
        for(Method m : methods){
            if(m.getDeclaringClass() != Object.class){
                return true;
            }
        }
        return false;
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.index.query;

import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.Optional;

/**
 * A query that will return only documents matching specific ids (and a type).
 */
public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
    public static final String NAME = "ids";
    public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);

    // "type"/"types"/"_type" are accepted spellings for the type restriction in the query DSL.
    private static final ParseField TYPE_FIELD = new ParseField("type", "types", "_type");
    private static final ParseField VALUES_FIELD = new ParseField("values");

    // Set semantics: duplicate ids collapse into one; iteration order is irrelevant to the query.
    private final Set<String> ids = new HashSet<>();

    private final String[] types;

    /**
     * Creates a new IdsQueryBuilder without providing the types of the documents to look for
     */
    public IdsQueryBuilder() {
        this.types = new String[0];
    }

    /**
     * Creates a new IdsQueryBuilder by providing the types of the documents to look for
     *
     * @throws IllegalArgumentException if {@code types} is null (an empty array is allowed
     *         and means "no type restriction")
     */
    public IdsQueryBuilder(String... types) {
        if (types == null) {
            throw new IllegalArgumentException("[ids] types cannot be null");
        }
        this.types = types;
    }

    /**
     * Read from a stream.
     */
    public IdsQueryBuilder(StreamInput in) throws IOException {
        super(in);
        // NOTE: read order (types, then ids) must mirror the write order in doWriteTo.
        types = in.readStringArray();
        Collections.addAll(ids, in.readStringArray());
    }

    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        // NOTE: write order must mirror the read order in IdsQueryBuilder(StreamInput).
        out.writeStringArray(types);
        out.writeStringArray(ids.toArray(new String[ids.size()]));
    }

    /**
     * Returns the types used in this query
     */
    public String[] types() {
        // NOTE(review): returns the internal array directly — callers could mutate it; confirm
        // this matches the convention of the other query builders.
        return this.types;
    }

    /**
     * Adds ids to the query.
     *
     * @throws IllegalArgumentException if {@code ids} is null
     */
    public IdsQueryBuilder addIds(String... ids) {
        if (ids == null) {
            throw new IllegalArgumentException("[ids] ids cannot be null");
        }
        Collections.addAll(this.ids, ids);
        return this;
    }

    /**
     * Returns the ids for the query.
     */
    public Set<String> ids() {
        // NOTE(review): exposes the live internal set — mutations by the caller affect this builder.
        return this.ids;
    }

    @Override
    protected void doXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(NAME);
        builder.array(TYPE_FIELD.getPreferredName(), types);
        builder.startArray(VALUES_FIELD.getPreferredName());
        for (String value : ids) {
            builder.value(value);
        }
        builder.endArray();
        printBoostAndQueryName(builder);
        builder.endObject();
    }

    /**
     * Parses an {@code ids} query from the XContent stream. An explicit (possibly empty)
     * "values" array is mandatory; "type(s)" may appear either as an array or as a single
     * string value.
     */
    public static Optional<IdsQueryBuilder> fromXContent(QueryParseContext parseContext) throws IOException {
        XContentParser parser = parseContext.parser();
        List<String> ids = new ArrayList<>();
        List<String> types = new ArrayList<>();
        float boost = AbstractQueryBuilder.DEFAULT_BOOST;
        String queryName = null;

        String currentFieldName = null;
        XContentParser.Token token;
        // Distinguishes "values: []" (legal, matches nothing) from "values" never appearing (an error).
        boolean idsProvided = false;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_ARRAY) {
                if (parseContext.getParseFieldMatcher().match(currentFieldName, VALUES_FIELD)) {
                    idsProvided = true;
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        // Numeric ids are accepted and read back as their string form.
                        if ((token == XContentParser.Token.VALUE_STRING) ||
                                (token == XContentParser.Token.VALUE_NUMBER)) {
                            String id = parser.textOrNull();
                            if (id == null) {
                                throw new ParsingException(parser.getTokenLocation(), "No value specified for term filter");
                            }
                            ids.add(id);
                        } else {
                            throw new ParsingException(parser.getTokenLocation(),
                                    "Illegal value for id, expecting a string or number, got: " + token);
                        }
                    }
                } else if (parseContext.getParseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        String value = parser.textOrNull();
                        if (value == null) {
                            throw new ParsingException(parser.getTokenLocation(), "No type specified for term filter");
                        }
                        types.add(value);
                    }
                } else {
                    throw new ParsingException(parser.getTokenLocation(), "[" + IdsQueryBuilder.NAME +
                            "] query does not support [" + currentFieldName + "]");
                }
            } else if (token.isValue()) {
                if (parseContext.getParseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
                    // Single-string form replaces (does not append to) any previously parsed types.
                    types = Collections.singletonList(parser.text());
                } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
                    boost = parser.floatValue();
                } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
                    queryName = parser.text();
                } else {
                    throw new ParsingException(parser.getTokenLocation(), "[" + IdsQueryBuilder.NAME +
                            "] query does not support [" + currentFieldName + "]");
                }
            } else {
                throw new ParsingException(parser.getTokenLocation(), "[" + IdsQueryBuilder.NAME +
                        "] unknown token [" + token + "] after [" + currentFieldName + "]");
            }
        }
        if (!idsProvided) {
            throw new ParsingException(parser.getTokenLocation(), "[" + IdsQueryBuilder.NAME + "] query, no ids values provided");
        }

        IdsQueryBuilder query = new IdsQueryBuilder(types.toArray(new String[types.size()]));
        query.addIds(ids.toArray(new String[ids.size()]));
        query.boost(boost).queryName(queryName);
        return Optional.of(query);
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
    protected Query doToQuery(QueryShardContext context) throws IOException {
        Query query;
        if (this.ids.isEmpty()) {
            // No ids at all: short-circuit to a match-no-docs query instead of building a TermsQuery.
            query = Queries.newMatchNoDocsQuery("Missing ids in \"" + this.getName() + "\" query.");
        } else {
            Collection<String> typesForQuery;
            if (types.length == 0) {
                // No explicit types: fall back to the types in scope for this shard context.
                typesForQuery = context.queryTypes();
            } else if (types.length == 1 && MetaData.ALL.equals(types[0])) {
                // "_all" expands to every mapped type.
                typesForQuery = context.getMapperService().types();
            } else {
                typesForQuery = new HashSet<>();
                Collections.addAll(typesForQuery, types);
            }

            query = new TermsQuery(UidFieldMapper.NAME, Uid.createUidsForTypesAndIds(typesForQuery, ids));
        }
        return query;
    }

    @Override
    protected int doHashCode() {
        // types is an array, so its contents are folded in via Arrays.hashCode, consistent with doEquals.
        return Objects.hash(ids, Arrays.hashCode(types));
    }

    @Override
    protected boolean doEquals(IdsQueryBuilder other) {
        return Objects.equals(ids, other.ids) &&
               Arrays.equals(types, other.types);
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.cloudwatchevents.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request for the ListEventBuses operation: an optional name prefix filter plus
 * standard pagination controls ({@code nextToken}/{@code limit}).
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/events-2015-10-07/ListEventBuses" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListEventBusesRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** Restricts results to event buses whose names start with this prefix. */
    private String namePrefix;

    /** Pagination token returned by a previous call, used to fetch the next page. */
    private String nextToken;

    /** Maximum number of results per call; the response carries a NextToken when more remain. */
    private Integer limit;

    /**
     * Sets the name-prefix filter.
     *
     * @param namePrefix
     *        Restricts results to event buses whose names start with this prefix.
     */
    public void setNamePrefix(String namePrefix) {
        this.namePrefix = namePrefix;
    }

    /**
     * Returns the name-prefix filter.
     *
     * @return the prefix restricting which event bus names are returned, or null if unset.
     */
    public String getNamePrefix() {
        return this.namePrefix;
    }

    /**
     * Fluent variant of {@link #setNamePrefix(String)}.
     *
     * @param namePrefix
     *        Restricts results to event buses whose names start with this prefix.
     * @return this request, for call chaining.
     */
    public ListEventBusesRequest withNamePrefix(String namePrefix) {
        setNamePrefix(namePrefix);
        return this;
    }

    /**
     * Sets the pagination token.
     *
     * @param nextToken
     *        Token returned by a previous call, used to retrieve the next set of results.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token.
     *
     * @return the token returned by a previous call, or null if unset.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken
     *        Token returned by a previous call, used to retrieve the next set of results.
     * @return this request, for call chaining.
     */
    public ListEventBusesRequest withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Sets the per-call result limit.
     *
     * @param limit
     *        Maximum number of results returned by this operation; a NextToken is returned when
     *        more results remain.
     */
    public void setLimit(Integer limit) {
        this.limit = limit;
    }

    /**
     * Returns the per-call result limit.
     *
     * @return the maximum number of results per call, or null if unset.
     */
    public Integer getLimit() {
        return this.limit;
    }

    /**
     * Fluent variant of {@link #setLimit(Integer)}.
     *
     * @param limit
     *        Maximum number of results returned by this operation.
     * @return this request, for call chaining.
     */
    public ListEventBusesRequest withLimit(Integer limit) {
        setLimit(limit);
        return this;
    }

    /**
     * Returns a string representation of this object. Useful for testing and debugging; only
     * non-null fields are rendered.
     *
     * @return a string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder();
        text.append("{");
        if (getNamePrefix() != null) {
            text.append("NamePrefix: ").append(getNamePrefix()).append(",");
        }
        if (getNextToken() != null) {
            text.append("NextToken: ").append(getNextToken()).append(",");
        }
        if (getLimit() != null) {
            text.append("Limit: ").append(getLimit());
        }
        text.append("}");
        return text.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (obj instanceof ListEventBusesRequest == false) {
            return false;
        }
        ListEventBusesRequest that = (ListEventBusesRequest) obj;
        return fieldEquals(that.getNamePrefix(), getNamePrefix())
                && fieldEquals(that.getNextToken(), getNextToken())
                && fieldEquals(that.getLimit(), getLimit());
    }

    /** Null-safe equality check for a single field pair. */
    private static boolean fieldEquals(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + (getNamePrefix() == null ? 0 : getNamePrefix().hashCode());
        result = prime * result + (getNextToken() == null ? 0 : getNextToken().hashCode());
        result = prime * result + (getLimit() == null ? 0 : getLimit().hashCode());
        return result;
    }

    @Override
    public ListEventBusesRequest clone() {
        return (ListEventBusesRequest) super.clone();
    }
}
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.mediaconnect.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * The settings for the source of the flow. Generated model POJO: plain fields with
 * setter/getter/fluent-with accessors, plus value-based toString/equals/hashCode and
 * protocol marshalling support.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediaconnect-2018-11-14/Source" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Source implements Serializable, Cloneable, StructuredPojo {

    /** The type of encryption that is used on the content ingested from this source. */
    private Encryption decryption;
    /** A description for the source; not used or seen outside the current AWS Elemental MediaConnect account. */
    private String description;
    /**
     * The ARN of the entitlement that allows subscribing to content from another AWS account; set by the content
     * originator and generated as part of the originator's flow.
     */
    private String entitlementArn;
    /** The IP address that the flow will be listening on for incoming content. */
    private String ingestIp;
    /** The port that the flow will be listening on for incoming content. */
    private Integer ingestPort;
    /** The name of the source. */
    private String name;
    /** The ARN of the source. */
    private String sourceArn;
    /** Attributes related to the transport stream that are used in the source. */
    private Transport transport;
    /**
     * The range of IP addresses allowed to contribute content to the source, as a CIDR block
     * (for example, 10.0.0.0/16).
     */
    private String whitelistCidr;

    /**
     * Sets the encryption used on ingested content.
     *
     * @param decryption
     *        The type of encryption that is used on the content ingested from this source.
     */
    public void setDecryption(Encryption decryption) {
        this.decryption = decryption;
    }

    /**
     * @return The type of encryption that is used on the content ingested from this source.
     */
    public Encryption getDecryption() {
        return this.decryption;
    }

    /**
     * Fluent variant of {@link #setDecryption(Encryption)}.
     *
     * @param decryption
     *        The type of encryption that is used on the content ingested from this source.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Source withDecryption(Encryption decryption) {
        setDecryption(decryption);
        return this;
    }

    /**
     * Sets the source description (internal to the current MediaConnect account).
     *
     * @param description
     *        A description for the source.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * @return A description for the source; not visible outside the current MediaConnect account.
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * Fluent variant of {@link #setDescription(String)}.
     *
     * @param description
     *        A description for the source.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Source withDescription(String description) {
        setDescription(description);
        return this;
    }

    /**
     * Sets the entitlement ARN for cross-account content subscription.
     *
     * @param entitlementArn
     *        The ARN of the entitlement that allows you to subscribe to content from another AWS account.
     */
    public void setEntitlementArn(String entitlementArn) {
        this.entitlementArn = entitlementArn;
    }

    /**
     * @return The ARN of the entitlement that allows you to subscribe to content from another AWS account.
     */
    public String getEntitlementArn() {
        return this.entitlementArn;
    }

    /**
     * Fluent variant of {@link #setEntitlementArn(String)}.
     *
     * @param entitlementArn
     *        The ARN of the entitlement that allows you to subscribe to content from another AWS account.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Source withEntitlementArn(String entitlementArn) {
        setEntitlementArn(entitlementArn);
        return this;
    }

    /**
     * Sets the listening IP address for incoming content.
     *
     * @param ingestIp
     *        The IP address that the flow will be listening on for incoming content.
     */
    public void setIngestIp(String ingestIp) {
        this.ingestIp = ingestIp;
    }

    /**
     * @return The IP address that the flow will be listening on for incoming content.
     */
    public String getIngestIp() {
        return this.ingestIp;
    }

    /**
     * Fluent variant of {@link #setIngestIp(String)}.
     *
     * @param ingestIp
     *        The IP address that the flow will be listening on for incoming content.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Source withIngestIp(String ingestIp) {
        setIngestIp(ingestIp);
        return this;
    }

    /**
     * Sets the listening port for incoming content.
     *
     * @param ingestPort
     *        The port that the flow will be listening on for incoming content.
     */
    public void setIngestPort(Integer ingestPort) {
        this.ingestPort = ingestPort;
    }

    /**
     * @return The port that the flow will be listening on for incoming content.
     */
    public Integer getIngestPort() {
        return this.ingestPort;
    }

    /**
     * Fluent variant of {@link #setIngestPort(Integer)}.
     *
     * @param ingestPort
     *        The port that the flow will be listening on for incoming content.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Source withIngestPort(Integer ingestPort) {
        setIngestPort(ingestPort);
        return this;
    }

    /**
     * Sets the name of the source.
     *
     * @param name
     *        The name of the source.
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * @return The name of the source.
     */
    public String getName() {
        return this.name;
    }

    /**
     * Fluent variant of {@link #setName(String)}.
     *
     * @param name
     *        The name of the source.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Source withName(String name) {
        setName(name);
        return this;
    }

    /**
     * Sets the ARN of the source.
     *
     * @param sourceArn
     *        The ARN of the source.
     */
    public void setSourceArn(String sourceArn) {
        this.sourceArn = sourceArn;
    }

    /**
     * @return The ARN of the source.
     */
    public String getSourceArn() {
        return this.sourceArn;
    }

    /**
     * Fluent variant of {@link #setSourceArn(String)}.
     *
     * @param sourceArn
     *        The ARN of the source.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Source withSourceArn(String sourceArn) {
        setSourceArn(sourceArn);
        return this;
    }

    /**
     * Sets the transport-stream attributes used in the source.
     *
     * @param transport
     *        Attributes related to the transport stream that are used in the source.
     */
    public void setTransport(Transport transport) {
        this.transport = transport;
    }

    /**
     * @return Attributes related to the transport stream that are used in the source.
     */
    public Transport getTransport() {
        return this.transport;
    }

    /**
     * Fluent variant of {@link #setTransport(Transport)}.
     *
     * @param transport
     *        Attributes related to the transport stream that are used in the source.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Source withTransport(Transport transport) {
        setTransport(transport);
        return this;
    }

    /**
     * Sets the CIDR allow-list for contributing content.
     *
     * @param whitelistCidr
     *        The range of IP addresses allowed to contribute content, as a CIDR block (e.g. 10.0.0.0/16).
     */
    public void setWhitelistCidr(String whitelistCidr) {
        this.whitelistCidr = whitelistCidr;
    }

    /**
     * @return The range of IP addresses allowed to contribute content, as a CIDR block (e.g. 10.0.0.0/16).
     */
    public String getWhitelistCidr() {
        return this.whitelistCidr;
    }

    /**
     * Fluent variant of {@link #setWhitelistCidr(String)}.
     *
     * @param whitelistCidr
     *        The range of IP addresses allowed to contribute content, as a CIDR block (e.g. 10.0.0.0/16).
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Source withWhitelistCidr(String whitelistCidr) {
        setWhitelistCidr(whitelistCidr);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value. Only non-null fields are rendered.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getDecryption() != null)
            sb.append("Decryption: ").append(getDecryption()).append(",");
        if (getDescription() != null)
            sb.append("Description: ").append(getDescription()).append(",");
        if (getEntitlementArn() != null)
            sb.append("EntitlementArn: ").append(getEntitlementArn()).append(",");
        if (getIngestIp() != null)
            sb.append("IngestIp: ").append(getIngestIp()).append(",");
        if (getIngestPort() != null)
            sb.append("IngestPort: ").append(getIngestPort()).append(",");
        if (getName() != null)
            sb.append("Name: ").append(getName()).append(",");
        if (getSourceArn() != null)
            sb.append("SourceArn: ").append(getSourceArn()).append(",");
        if (getTransport() != null)
            sb.append("Transport: ").append(getTransport()).append(",");
        if (getWhitelistCidr() != null)
            sb.append("WhitelistCidr: ").append(getWhitelistCidr());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof Source == false)
            return false;
        Source other = (Source) obj;
        // For each field: XOR rejects "exactly one side null", then non-null values are compared.
        if (other.getDecryption() == null ^ this.getDecryption() == null)
            return false;
        if (other.getDecryption() != null && other.getDecryption().equals(this.getDecryption()) == false)
            return false;
        if (other.getDescription() == null ^ this.getDescription() == null)
            return false;
        if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false)
            return false;
        if (other.getEntitlementArn() == null ^ this.getEntitlementArn() == null)
            return false;
        if (other.getEntitlementArn() != null && other.getEntitlementArn().equals(this.getEntitlementArn()) == false)
            return false;
        if (other.getIngestIp() == null ^ this.getIngestIp() == null)
            return false;
        if (other.getIngestIp() != null && other.getIngestIp().equals(this.getIngestIp()) == false)
            return false;
        if (other.getIngestPort() == null ^ this.getIngestPort() == null)
            return false;
        if (other.getIngestPort() != null && other.getIngestPort().equals(this.getIngestPort()) == false)
            return false;
        if (other.getName() == null ^ this.getName() == null)
            return false;
        if (other.getName() != null && other.getName().equals(this.getName()) == false)
            return false;
        if (other.getSourceArn() == null ^ this.getSourceArn() == null)
            return false;
        if (other.getSourceArn() != null && other.getSourceArn().equals(this.getSourceArn()) == false)
            return false;
        if (other.getTransport() == null ^ this.getTransport() == null)
            return false;
        if (other.getTransport() != null && other.getTransport().equals(this.getTransport()) == false)
            return false;
        if (other.getWhitelistCidr() == null ^ this.getWhitelistCidr() == null)
            return false;
        if (other.getWhitelistCidr() != null && other.getWhitelistCidr().equals(this.getWhitelistCidr()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        // Field order matches the declaration order above, consistent with equals().
        hashCode = prime * hashCode + ((getDecryption() == null) ? 0 : getDecryption().hashCode());
        hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
        hashCode = prime * hashCode + ((getEntitlementArn() == null) ? 0 : getEntitlementArn().hashCode());
        hashCode = prime * hashCode + ((getIngestIp() == null) ? 0 : getIngestIp().hashCode());
        hashCode = prime * hashCode + ((getIngestPort() == null) ? 0 : getIngestPort().hashCode());
        hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
        hashCode = prime * hashCode + ((getSourceArn() == null) ? 0 : getSourceArn().hashCode());
        hashCode = prime * hashCode + ((getTransport() == null) ? 0 : getTransport().hashCode());
        hashCode = prime * hashCode + ((getWhitelistCidr() == null) ? 0 : getWhitelistCidr().hashCode());
        return hashCode;
    }

    @Override
    public Source clone() {
        try {
            return (Source) super.clone();
        } catch (CloneNotSupportedException e) {
            // Unreachable in practice: the class declares Cloneable, so Object.clone() cannot refuse.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        // Delegates wire-format serialization to the generated marshaller for this type.
        com.amazonaws.services.mediaconnect.model.transform.SourceMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2012 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.httppanel.view.syntaxhighlight; import java.awt.Color; import java.awt.Component; import java.io.IOException; import java.util.LinkedList; import java.util.List; import java.util.Vector; import java.util.regex.Pattern; import javax.swing.Action; import javax.swing.JMenuItem; import javax.swing.JPopupMenu; import javax.swing.JViewport; import javax.swing.text.BadLocationException; import javax.swing.text.DefaultHighlighter; import javax.swing.text.Highlighter; import javax.swing.text.Highlighter.HighlightPainter; import org.apache.commons.configuration.FileConfiguration; import org.apache.log4j.Logger; import org.fife.ui.rsyntaxtextarea.AbstractTokenMakerFactory; import org.fife.ui.rsyntaxtextarea.RSyntaxDocument; import org.fife.ui.rsyntaxtextarea.RSyntaxTextArea; import org.fife.ui.rsyntaxtextarea.SyntaxConstants; import org.fife.ui.rsyntaxtextarea.Theme; import org.fife.ui.rtextarea.RTextArea; import org.fife.ui.rtextarea.RTextScrollPane; import org.parosproxy.paros.Constant; import org.parosproxy.paros.extension.ExtensionPopupMenuItem; import org.parosproxy.paros.view.View; import org.zaproxy.zap.extension.httppanel.Message; import org.zaproxy.zap.extension.httppanel.view.syntaxhighlight.menus.SyntaxMenu; import 
// Fragment: completes the import statement begun on the previous (unseen) line.
org.zaproxy.zap.extension.httppanel.view.syntaxhighlight.menus.ViewMenu;
import org.zaproxy.zap.extension.search.SearchMatch;
import org.zaproxy.zap.utils.DisplayUtils;
import org.zaproxy.zap.utils.FontUtils;
import org.zaproxy.zap.utils.FontUtils.FontType;
import org.zaproxy.zap.view.HighlightSearchEntry;
import org.zaproxy.zap.view.HighlighterManager;

/**
 * Base syntax-highlighting text area used by the HTTP panel views.
 *
 * <p>Extends {@link RSyntaxTextArea} and adds: configurable syntax styles exposed through
 * {@link #getSyntaxStyles()}, persistence of view options via
 * {@link #loadConfiguration(String, FileConfiguration)} /
 * {@link #saveConfiguration(String, FileConfiguration)}, integration with the global
 * {@link HighlighterManager} (search-match highlighting), and a set of shared popup menu
 * items registered once, process-wide, with the main {@link View} popup list.
 *
 * <p>Subclasses supply the message-specific behaviour: {@link #search(Pattern, List)},
 * {@link #highlight(SearchMatch)} and {@link #getTokenMakerFactory()}.
 */
public abstract class HttpPanelSyntaxHighlightTextArea extends RSyntaxTextArea {

    private static final long serialVersionUID = -9082089105656842054L;

    private static Logger log = Logger.getLogger(HttpPanelSyntaxHighlightTextArea.class);

    /** Display label for the "no highlighting" style offered to the user. */
    public static final String PLAIN_SYNTAX_LABEL =
            Constant.messages.getString("http.panel.view.syntaxtext.syntax.plain");

    // Configuration-key suffixes appended to the caller-supplied key prefix in
    // loadConfiguration/saveConfiguration. Each one persists a single view option.
    private static final String ANTI_ALIASING = "aa";
    private static final String SHOW_LINE_NUMBERS = "linenumbers";
    private static final String CODE_FOLDING = "codefolding";
    private static final String WORD_WRAP = "wordwrap";
    private static final String HIGHLIGHT_CURRENT_LINE = "highlightline";
    private static final String FADE_CURRENT_HIGHLIGHT_LINE = "fadehighlightline";
    private static final String SHOW_WHITESPACE_CHARACTERS = "whitespaces";
    private static final String SHOW_NEWLINE_CHARACTERS = "newlines";
    private static final String MARK_OCCURRENCES = "markocurrences";
    private static final String ROUNDED_SELECTION_EDGES = "roundedselection";
    private static final String BRACKET_MATCHING = "bracketmatch";
    private static final String ANIMATED_BRACKET_MATCHING = "animatedbracketmatch";

    /** The message currently displayed, or {@code null} when none is set. */
    private Message message;

    /** (label, styleKey) pairs selectable for this text area; always contains the plain style. */
    private Vector<SyntaxStyle> syntaxStyles;

    private boolean codeFoldingAllowed;

    // Popup menu items shared by ALL instances of this class: created lazily by
    // initActions() on first construction and added once to the global popup list.
    private static SyntaxMenu syntaxMenu = null;
    private static ViewMenu viewMenu = null;
    private static TextAreaMenuItem cutAction = null;
    private static TextAreaMenuItem copyAction = null;
    private static TextAreaMenuItem pasteAction = null;
    private static TextAreaMenuItem deleteAction = null;
    private static TextAreaMenuItem undoAction = null;
    private static TextAreaMenuItem redoAction = null;
    private static TextAreaMenuItem selectAllAction = null;

    /**
     * Creates the text area with the subclass token maker installed, plain style selected,
     * most RSyntaxTextArea decorations disabled, the work-panels font applied, the dark
     * theme applied when the look and feel is dark, and the search highlighter wired up.
     */
    public HttpPanelSyntaxHighlightTextArea() {
        ((RSyntaxDocument) getDocument()).setTokenMakerFactory(getTokenMakerFactory());
        setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_NONE);

        syntaxStyles = new Vector<>();
        addSyntaxStyle(PLAIN_SYNTAX_LABEL, SyntaxConstants.SYNTAX_STYLE_NONE);

        // One-time, process-wide registration of the shared popup menu items.
        if (syntaxMenu == null) {
            initActions();
        }

        // Disable the built-in RSyntaxTextArea popup; ZAP's main popup is used instead
        // (createPopupMenu() below also returns null for the same reason).
        setPopupMenu(null);

        this.message = null;

        setHyperlinksEnabled(false);

        setAntiAliasingEnabled(true);

        setLineWrap(true);

        setHighlightCurrentLine(false);
        setFadeCurrentLineHighlight(false);

        setWhitespaceVisible(false);
        setEOLMarkersVisible(false);

        setMarkOccurrences(false);

        setBracketMatchingEnabled(false);
        setAnimateBracketMatching(false);

        setAutoIndentEnabled(false);
        setCloseCurlyBraces(false);
        setCloseMarkupTags(false);
        setClearWhitespaceLinesEnabled(false);

        this.setFont(
                FontUtils.getFontWithFallback(FontType.workPanels, this.getFont().getFontName()));

        if (DisplayUtils.isDarkLookAndFeel()) {
            try {
                Theme theme =
                        Theme.load(
                                HttpPanelSyntaxHighlightTextArea.class.getResourceAsStream(
                                        "/org/fife/ui/rsyntaxtextarea/themes/dark.xml"));
                theme.apply(this);
            } catch (IOException e) {
                log.error("Failed to set RSyntaxTextArea dark theme", e);
            }
        }

        initHighlighter();
    }

    /**
     * Sets whether or not code folding is allowed, to show or not a context menu item to
     * enable/disable code folding.
     *
     * <p>Default is {@code false}.
     *
     * @param codeFoldingAllowed {@code true} if code folding is allowed, {@code false} otherwise.
     * @since 2.7.0
     * @see RSyntaxTextArea#setCodeFoldingEnabled(boolean)
     */
    protected void setCodeFoldingAllowed(boolean codeFoldingAllowed) {
        this.codeFoldingAllowed = codeFoldingAllowed;
    }

    /**
     * Tells whether or not code folding is allowed.
     *
     * @return {@code true} if code folding is allowed, {@code false} otherwise.
     * @since 2.7.0
     */
    public boolean isCodeFoldingAllowed() {
        return codeFoldingAllowed;
    }

    /** Suppresses the default RSyntaxTextArea popup; ZAP's shared popup items are used instead. */
    @Override
    protected JPopupMenu createPopupMenu() {
        return null;
    }

    /**
     * Subscribes this text area to the global {@link HighlighterManager} so that highlight
     * additions/removals made anywhere in the UI are reflected here, and applies any
     * already-registered highlights when a message is present.
     */
    private void initHighlighter() {
        HighlighterManager highlighter = HighlighterManager.getInstance();

        highlighter.addHighlighterManagerListener(
                e -> {
                    switch (e.getType()) {
                        case HIGHLIGHTS_SET:
                        case HIGHLIGHT_REMOVED:
                            // Full rebuild: drop everything and re-apply the current set.
                            removeAllHighlights();
                            highlightAll();
                            break;
                        case HIGHLIGHT_ADDED:
                            // Incremental: only apply the newly added entry.
                            highlightEntryParser(e.getHighlight());
                            break;
                    }
                    this.invalidate();
                });

        if (message != null) {
            highlightAll();
        }
    }

    // Highlight all search strings from HighlightManager
    private void highlightAll() {
        HighlighterManager highlighter = HighlighterManager.getInstance();

        LinkedList<HighlightSearchEntry> highlights = highlighter.getHighlights();
        for (HighlightSearchEntry entry : highlights) {
            highlightEntryParser(entry);
        }
    }

    // Parse the TextArea data and search the HighlightEntry strings
    // Highlight all found strings
    // NOTE(review): if entry.getToken() is ever the empty string, indexOf("") returns the
    // current position and lastPos never advances, looping forever — confirm callers
    // never register an empty token.
    private void highlightEntryParser(HighlightSearchEntry entry) {
        String text;
        int lastPos = 0;

        text = this.getText();

        Highlighter hilite = this.getHighlighter();
        HighlightPainter painter = new DefaultHighlighter.DefaultHighlightPainter(entry.getColor());

        while ((lastPos = text.indexOf(entry.getToken(), lastPos)) > -1) {
            try {
                hilite.addHighlight(lastPos, lastPos + entry.getToken().length(), painter);
                lastPos += entry.getToken().length();
            } catch (BadLocationException e) {
                log.warn("Could not highlight entry", e);
            }
        }
    }

    @Override
    // Apply highlights after a setText()
    public void setText(String s) {
        super.setText(s);
        highlightAll();
    }

    /**
     * Searches the displayed content for the given pattern, adding every hit to
     * {@code matches}. Implemented by subclasses which know the message structure.
     */
    public abstract void search(Pattern p, List<SearchMatch> matches);

    // highlight a specific SearchMatch in the editor
    public abstract void highlight(SearchMatch sm);

    /**
     * Clears existing highlights, paints the [start, end) span light grey and moves the
     * caret to {@code start} so the match scrolls into view.
     */
    protected void highlight(int start, int end) {
        Highlighter hilite = this.getHighlighter();
        HighlightPainter painter = new DefaultHighlighter.DefaultHighlightPainter(Color.LIGHT_GRAY);

        try {
            // DOBIN
            removeAllHighlights();
            hilite.addHighlight(start, end, painter);
            this.setCaretPosition(start);
        } catch (BadLocationException e) {
            log.error(e.getMessage(), e);
        }
    }

    /**
     * Adds a highlight with the supplied painter and moves the caret to {@code start}.
     *
     * @return the highlighter's reference object for later removal via
     *     {@link #removeHighlight(Object)}, or {@code null} when the offsets are invalid.
     */
    public Object highlight(int start, int end, HighlightPainter painter) {
        try {
            Object highlightReference = getHighlighter().addHighlight(start, end, painter);
            this.setCaretPosition(start);
            return highlightReference;
        } catch (BadLocationException e) {
            log.error(e.getMessage(), e);
        }
        return null;
    }

    /** Removes a single highlight previously returned by {@link #highlight(int, int, HighlightPainter)}. */
    public void removeHighlight(Object highlightReference) {
        getHighlighter().removeHighlight(highlightReference);
    }

    /** Removes every highlight currently applied to this text area. */
    private void removeAllHighlights() {
        Highlighter hilite = this.getHighlighter();
        hilite.removeAllHighlights();
    }

    /** Sets the message this text area displays; {@code null} clears it. */
    public void setMessage(Message aMessage) {
        this.message = aMessage;
    }

    /** Returns the message currently displayed, or {@code null} when none. */
    public Message getMessage() {
        return message;
    }

    /**
     * Restores the view options stored under {@code key} from {@code fileConfiguration},
     * keeping the current value as the default for any missing entry. Line numbers and
     * code folding are only applied when this component is inside an {@link RTextScrollPane}.
     */
    public void loadConfiguration(String key, FileConfiguration fileConfiguration) {
        setAntiAliasingEnabled(
                fileConfiguration.getBoolean(key + ANTI_ALIASING, this.getAntiAliasingEnabled()));

        // Walk up the component hierarchy: scroll-pane-owned options live on the
        // enclosing RTextScrollPane (parent of the JViewport), not on this text area.
        Component c = getParent();
        if (c instanceof JViewport) {
            c = c.getParent();
            if (c instanceof RTextScrollPane) {
                final RTextScrollPane scrollPane = (RTextScrollPane) c;
                scrollPane.setLineNumbersEnabled(
                        fileConfiguration.getBoolean(
                                key + SHOW_LINE_NUMBERS, scrollPane.getLineNumbersEnabled()));

                if (isCodeFoldingAllowed()) {
                    setCodeFoldingEnabled(
                            fileConfiguration.getBoolean(
                                    key + CODE_FOLDING, this.isCodeFoldingEnabled()));
                    scrollPane.setFoldIndicatorEnabled(this.isCodeFoldingEnabled());
                }
            }
        }

        setLineWrap(fileConfiguration.getBoolean(key + WORD_WRAP, this.getLineWrap()));

        setHighlightCurrentLine(
                fileConfiguration.getBoolean(
                        key + HIGHLIGHT_CURRENT_LINE, this.getHighlightCurrentLine()));
        setFadeCurrentLineHighlight(
                fileConfiguration.getBoolean(
                        key + FADE_CURRENT_HIGHLIGHT_LINE, this.getFadeCurrentLineHighlight()));

        setWhitespaceVisible(
                fileConfiguration.getBoolean(
                        key + SHOW_WHITESPACE_CHARACTERS, this.isWhitespaceVisible()));
        setEOLMarkersVisible(
                fileConfiguration.getBoolean(
                        key + SHOW_NEWLINE_CHARACTERS, this.getEOLMarkersVisible()));

        setMarkOccurrences(
                fileConfiguration.getBoolean(key + MARK_OCCURRENCES, this.getMarkOccurrences()));

        setRoundedSelectionEdges(
                fileConfiguration.getBoolean(
                        key + ROUNDED_SELECTION_EDGES, this.getRoundedSelectionEdges()));

        setBracketMatchingEnabled(
                fileConfiguration.getBoolean(
                        key + BRACKET_MATCHING, this.isBracketMatchingEnabled()));
        setAnimateBracketMatching(
                fileConfiguration.getBoolean(
                        key + ANIMATED_BRACKET_MATCHING, this.getAnimateBracketMatching()));
    }

    /**
     * Persists the current view options under {@code key} into {@code fileConfiguration}.
     * Mirror image of {@link #loadConfiguration(String, FileConfiguration)}.
     */
    public void saveConfiguration(String key, FileConfiguration fileConfiguration) {
        fileConfiguration.setProperty(key + ANTI_ALIASING, this.getAntiAliasingEnabled());

        Component c = getParent();
        if (c instanceof JViewport) {
            c = c.getParent();
            if (c instanceof RTextScrollPane) {
                final RTextScrollPane scrollPane = (RTextScrollPane) c;
                fileConfiguration.setProperty(
                        key + SHOW_LINE_NUMBERS, scrollPane.getLineNumbersEnabled());

                if (isCodeFoldingAllowed()) {
                    fileConfiguration.setProperty(key + CODE_FOLDING, this.isCodeFoldingEnabled());
                }
            }
        }

        fileConfiguration.setProperty(key + WORD_WRAP, this.getLineWrap());

        fileConfiguration.setProperty(key + HIGHLIGHT_CURRENT_LINE, this.getHighlightCurrentLine());
        fileConfiguration.setProperty(
                key + FADE_CURRENT_HIGHLIGHT_LINE, this.getFadeCurrentLineHighlight());

        fileConfiguration.setProperty(key + SHOW_WHITESPACE_CHARACTERS, this.isWhitespaceVisible());
        fileConfiguration.setProperty(key + SHOW_NEWLINE_CHARACTERS, this.getEOLMarkersVisible());

        fileConfiguration.setProperty(key + MARK_OCCURRENCES, this.getMarkOccurrences());

        fileConfiguration.setProperty(
                key + ROUNDED_SELECTION_EDGES, this.getRoundedSelectionEdges());

        fileConfiguration.setProperty(key + BRACKET_MATCHING, this.isBracketMatchingEnabled());
        fileConfiguration.setProperty(
                key + ANIMATED_BRACKET_MATCHING, this.getAnimateBracketMatching());
    }

    /** Returns the (label, styleKey) pairs selectable for this text area. */
    public Vector<SyntaxStyle> getSyntaxStyles() {
        return syntaxStyles;
    }

    /** Registers an additional selectable syntax style (called by subclass constructors). */
    protected void addSyntaxStyle(String label, String styleKey) {
        syntaxStyles.add(new SyntaxStyle(label, styleKey));
    }

    /** Supplies the token maker factory defining which syntaxes this subclass can highlight. */
    protected abstract CustomTokenMakerFactory getTokenMakerFactory();

    /**
     * Lazily creates the popup menu items shared by all instances and registers them with
     * the main View popup list. Synchronized + null-checked so concurrent first
     * constructions register the items exactly once.
     */
    private static synchronized void initActions() {
        if (syntaxMenu == null) {
            syntaxMenu = new SyntaxMenu();
            viewMenu = new ViewMenu();

            undoAction = new TextAreaMenuItem(RTextArea.UNDO_ACTION, true, false);
            redoAction = new TextAreaMenuItem(RTextArea.REDO_ACTION, false, true);

            cutAction = new TextAreaMenuItem(RTextArea.CUT_ACTION, false, false);
            copyAction = new TextAreaMenuItem(RTextArea.COPY_ACTION, false, false);
            pasteAction = new TextAreaMenuItem(RTextArea.PASTE_ACTION, false, false);
            deleteAction = new TextAreaMenuItem(RTextArea.DELETE_ACTION, false, true);

            selectAllAction = new TextAreaMenuItem(RTextArea.SELECT_ALL_ACTION, false, false);

            final List<JMenuItem> mainPopupMenuItems = View.getSingleton().getPopupList();
            mainPopupMenuItems.add(syntaxMenu);
            mainPopupMenuItems.add(viewMenu);

            mainPopupMenuItems.add(undoAction);
            mainPopupMenuItems.add(redoAction);

            mainPopupMenuItems.add(cutAction);
            mainPopupMenuItems.add(copyAction);
            mainPopupMenuItems.add(pasteAction);
            mainPopupMenuItems.add(deleteAction);

            mainPopupMenuItems.add(selectAllAction);
        }
    }

    /** Immutable pairing of a user-visible label with an RSyntaxTextArea style key. */
    public static class SyntaxStyle {
        private String label;
        private String styleKey;

        public SyntaxStyle(String label, String styleKey) {
            this.label = label;
            this.styleKey = styleKey;
        }

        public String getLabel() {
            return label;
        }

        public String getStyleKey() {
            return styleKey;
        }
    }

    /** Token maker factory whose only built-in mapping is the plain-text style. */
    protected static class CustomTokenMakerFactory extends AbstractTokenMakerFactory {

        @Override
        protected void initTokenMakerMap() {
            String pkg = "org.fife.ui.rsyntaxtextarea.modes.";

            putMapping(SYNTAX_STYLE_NONE, pkg + "PlainTextTokenMaker");
        }
    }

    /**
     * Popup menu item wrapping one of RTextArea's built-in record actions (cut, copy,
     * paste, undo, ...) with optional separators, enabled only for instances of the
     * enclosing text area and gated on editability / document content as appropriate.
     */
    private static class TextAreaMenuItem extends ExtensionPopupMenuItem {

        private static final long serialVersionUID = -8369459846515841057L;

        private int actionId;
        private boolean precedeWithSeparator;
        private boolean succeedWithSeparator;

        public TextAreaMenuItem(
                int actionId, boolean precedeWithSeparator, boolean succeedWithSeparator)
                throws IllegalArgumentException {
            this.actionId = actionId;
            this.precedeWithSeparator = precedeWithSeparator;
            this.succeedWithSeparator = succeedWithSeparator;

            Action action = RTextArea.getAction(actionId);
            if (action == null) {
                throw new IllegalArgumentException("Action not found with id: " + actionId);
            }
            setAction(action);
        }

        @Override
        public boolean isEnableForComponent(Component invoker) {
            if (invoker instanceof HttpPanelSyntaxHighlightTextArea) {
                HttpPanelSyntaxHighlightTextArea httpPanelTextArea =
                        (HttpPanelSyntaxHighlightTextArea) invoker;

                switch (actionId) {
                    case RTextArea.CUT_ACTION:
                        if (!httpPanelTextArea.isEditable()) {
                            this.setEnabled(false);
                        }
                        break;
                    case RTextArea.DELETE_ACTION:
                    case RTextArea.PASTE_ACTION:
                        this.setEnabled(httpPanelTextArea.isEditable());
                        break;
                    case RTextArea.SELECT_ALL_ACTION:
                        this.setEnabled(httpPanelTextArea.getDocument().getLength() != 0);
                        break;
                }

                return true;
            }
            return false;
        }

        @Override
        public boolean precedeWithSeparator() {
            return precedeWithSeparator;
        }

        @Override
        public boolean succeedWithSeparator() {
            return succeedWithSeparator;
        }

        @Override
        public boolean isSafe() {
            return true;
        }
    }
}
/** * Copyright 2007-2013 Zuse Institute Berlin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.zib.scalaris.examples.wikipedia.data; import java.io.Serializable; import java.util.HashMap; import java.util.Locale; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Represents generic site information. * * @author Nico Kruber, kruber@zib.de */ public class SiteInfo implements Serializable { /** * Version for serialisation. */ private static final long serialVersionUID = 1L; protected String base; protected String sitename; protected String generator; protected String caseStr; /** * Maps namespace keys to a map with the following two entries: * <ul> * <li><tt>{@link #NAMESPACE_PREFIX}</tt>: prefix of the namespace</li> * <li><tt>{@link #NAMESPACE_CASE}</tt>: case of the namespace, e.g. "first-letter"</li> * </ul> */ protected Map<String, Map<String, String>> namespaces; /** * Key for getting the namespace prefix in the maps contained in * {@link #namespaces}. * * @see #getNamespaces() */ public final static String NAMESPACE_PREFIX = "prefix"; /** * Key for getting the namespace case in the maps contained in * {@link #namespaces}. * * @see #getNamespaces() */ public final static String NAMESPACE_CASE = "case"; protected static final Pattern MATCH_WIKI_SITE_LANG = Pattern.compile("^http[s]?://([^.]+).*$"); /** * Creates a site info object with the given data. 
*/ public SiteInfo() { this.base = ""; this.sitename = ""; this.generator = ""; this.caseStr = ""; this.namespaces = new HashMap<String, Map<String, String>>(); } /** * Creates a site info object with the given data. * * @param base * the url of the main site * @param sitename * the name of the site * @param generator * the generator of the site (MediaWiki version string) * @param caseStr * the case option of the site * @param namespaces * the namespaces of the site */ public SiteInfo(String base, String sitename, String generator, String caseStr, Map<String, Map<String, String>> namespaces) { this.base = base; this.sitename = sitename; this.generator = generator; this.caseStr = caseStr; this.namespaces = namespaces; } /** * Gets the base URL of the site. * * @return the base URL */ public String getBase() { return base; } /** * Sets the base URL of the site. * * @param base the base URL to set */ public void setBase(String base) { this.base = base; } /** * Extract the language string from {@link #base}. Assumes <tt>en</tt> if no * match is found. * * @return Wikipedia language code * @see #getBase() */ public String extractLang() { String lang = "en"; Matcher matcher = MATCH_WIKI_SITE_LANG.matcher(base); if (matcher.matches()) { lang = matcher.group(1); } return lang; } /** * Extract the locale from the language string from {@link #base}. * * @return locale or <tt>null</tt> if the found language is no valid language code * @see #extractLang() */ public Locale extractLolace() { String lang = extractLang(); String country = ""; int idx = lang.indexOf('_'); // e.g. de_DE if (idx >= 0) { lang = lang.substring(0, idx); country = lang.substring(idx + 1); } try { Locale locale = new Locale(lang, country); // test that the locale is working: locale.getLanguage(); locale.getCountry(); return locale; } catch (Exception e) { } return null; } /** * Gets the site's name. * * @return the sitename */ public String getSitename() { return sitename; } /** * Sets the site's name. 
* * @param sitename the sitename to set */ public void setSitename(String sitename) { this.sitename = sitename; } /** * Gets the site's generator (MediaWiki version string). * * @return the generator */ public String getGenerator() { return generator; } /** * Sets the site's generator (MediaWiki version string). * * @param generator the generator to set */ public void setGenerator(String generator) { this.generator = generator; } /** * Gets the namespace mapping. * * Maps namespace keys to a map with the following two entries: * <ul> * <li><tt>{@link #NAMESPACE_PREFIX}</tt>: prefix of the namespace</li> * <li><tt>{@link #NAMESPACE_CASE}</tt>: case of the namespace, e.g. "first-letter"</li> * </ul> * * @return the namespace */ public Map<String, Map<String, String>> getNamespaces() { return namespaces; } /** * Sets the namespace mapping. * * @param namespaces the namespace to set */ public void setNamespaces(Map<String, Map<String, String>> namespaces) { this.namespaces = namespaces; } /** * Gets the case option of the site. * * @return the case */ public String getCase() { return caseStr; } /** * Sets the case option of the site. * * @param caseStr the case to set */ public void setCase(String caseStr) { this.caseStr = caseStr; } }
package com.analyticobjects.digitalsafe.database; import com.analyticobjects.utility.ByteUtility; import com.analyticobjects.digitalsafe.exceptions.PassphraseExpiredException; import com.analyticobjects.digitalsafe.crypto.Passphrase; import com.analyticobjects.digitalsafe.crypto.TripleAES; import com.analyticobjects.digitalsafe.exceptions.InvalidPassphraseException; import com.analyticobjects.utility.SerializationUtility; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.logging.Level; import java.util.logging.Logger; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import java.util.zip.ZipOutputStream; /** * Manage secure data persistence. The goal of this class is to securely store user data as concisely as possible to * make code reviews as easy as possible. The goal of most databases is to make data highly available for marketing from * a localized server cluster. I call this a seize everything architecture, as hackers and totalitarian governments * simply need to hit up one place to seize all data. The approach taken here is based on noting current palm-top * computers (aka phones) have multiGB multiGhz quad-core power and local storage is the best way to store data. Person * specific data should follow data gravity and stick with the person it is related to. * * TODO: Automate distributed p2p or f2f backup and synchronization with untrustworthy/cloud storage providers. * * @author Joel Bondurant * @since 2013.08 */ public final class SecureDatabase { private final Passphrase passphrase; private final Path dbPath; private static final String MASTER_INDEX = "MASTER_INDEX"; /** * Constructs a new connection to a secure database at the path supplied. * * @param dbPath A path for the database file. 
*/ public SecureDatabase(Path dbPath) { this.dbPath = Paths.get(dbPath.toUri()); this.passphrase = new Passphrase(); this.ensureFile(); } /** * Ensure the db file exists. */ private void ensureFile() { File dbFile = this.dbPath.toFile(); if (!dbFile.exists()) { try { dbFile.createNewFile(); } catch (IOException ex) { Logger.getLogger(SecureDatabase.class.getName()).log(Level.SEVERE, ex.getLocalizedMessage(), ex); } } } /** * Resets the database. */ public void clear() { File dbFile = this.dbFile(); if (dbFile.exists()) { dbFile.delete(); } ensureFile(); } public void lock() { this.passphrase.clear(); } public boolean isLocked() { return this.passphrase.isClear(); } public void setPassphrase(String passphrase) throws InvalidPassphraseException { this.passphrase.setPassphrase(passphrase); if (dbFile().length() == 0) { return; // accept any passphrase for an empty database. } try { this.getMasterIndex(); } catch (PassphraseExpiredException ex) { lock(); Logger.getLogger(SecureDatabase.class.getName()).log(Level.SEVERE, ex.getLocalizedMessage(), ex); } } /** * A File object representation of the database. * * @return A File object representation of the database. */ private File dbFile() { ensureFile(); return this.dbPath.toFile(); } /** * Gets a ZipOutputStream object representing the outer database wrapper. * * @return A zip file output stream for the Passphrase database. * @throws FileNotFoundException */ private ZipOutputStream outZip() throws FileNotFoundException { return new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(dbFile()))); } /** * Gets a ZipFile object representing the outer Passphrase database wrapper. * * @return A zip file archive object for the Passphrase database. * @throws IOException */ private ZipFile zipDbFile() throws IOException { return new ZipFile(dbFile()); } /** * @return True if empty, false ow. */ public boolean isEmpty() { return (dbFile().length() < 1L); } /** * Get the master index from encrypted persistent storage. 
* * @return A volatile memory version of the master index. * @throws PassphraseExpiredException */ public MasterIndex getMasterIndex() throws PassphraseExpiredException { try ( ZipFile zipFile = zipDbFile(); InputStream masterIndexInStream = zipFile.getInputStream(zipFile.getEntry(MASTER_INDEX));) { byte[] encryptedMasterIndex = ByteUtility.readFully(masterIndexInStream); byte[] decryptedMasterIndex = TripleAES.decrypt(this.passphrase, encryptedMasterIndex); return SerializationUtility.<MasterIndex>inflate(decryptedMasterIndex); } catch (IOException | ClassNotFoundException ex) { Logger.getLogger(SecureDatabase.class.getName()).log(Level.SEVERE, ex.getLocalizedMessage(), ex); } return null; } /** * Persist the master index to non-volatile storage. * * @param masterIndex The volatile memory master index. * @throws PassphraseExpiredException */ public void commitMasterIndex(MasterIndex masterIndex) throws PassphraseExpiredException { try (ZipOutputStream zipOut = outZip();) { // should only be one unmodified file per call, but may need multithreading in future. 
for (FileTable fileTable : masterIndex.getFileTables()) { Logger.getLogger(SecureDatabase.class.getName()).log(Level.FINE, "Committing File Table: {0}", fileTable.getName()); for (FileTableEntry fileTableEntry : fileTable.getAll()) { if (!fileTableEntry.isSourceAttached()) { continue; } Logger.getLogger(SecureDatabase.class.getName()).log(Level.FINE, "Committing File: {0}", fileTableEntry.getFileName()); zipOut.putNextEntry(new ZipEntry(fileTableEntry.getFileNameHash())); Path sourceFilePath = fileTableEntry.getSourceFilePath(); byte[] fileBytes = ByteUtility.readFully(sourceFilePath); zipOut.write(TripleAES.encrypt(this.passphrase, fileBytes)); zipOut.flush(); zipOut.closeEntry(); fileTableEntry.detachSource(); } } masterIndex.incrementCommitCount(); byte[] encryptedMasterIndex = TripleAES.encrypt(this.passphrase, SerializationUtility.<MasterIndex>deflate(masterIndex)); zipOut.putNextEntry(new ZipEntry(MASTER_INDEX)); zipOut.write(encryptedMasterIndex); zipOut.flush(); zipOut.closeEntry(); zipOut.close(); } catch (IOException ex) { Logger.getLogger(SecureDatabase.class.getName()).log(Level.FINEST, ex.getLocalizedMessage(), ex); } } /** * Load an encrypted file into volatile memory from the database. * * @param fileTableEntry A file table entry record to export. * @return The raw unencrypted file bytes. 
* @throws com.analyticobjects.digitalsafe.exceptions.PassphraseExpiredException * @throws java.io.IOException */ public byte[] loadFile(FileTableEntry fileTableEntry) throws PassphraseExpiredException, IOException { if (fileTableEntry == null) { return null; } byte[] fileBytes = null; ensureFile(); try { if (Files.size(dbFile().toPath()) < 1L) { return fileBytes; } } catch (IOException ex) { Logger.getLogger(SecureDatabase.class.getName()).log(Level.FINEST, ex.getLocalizedMessage(), ex); throw ex; } byte[] encryptedFile; try ( ZipFile zipFile = zipDbFile(); InputStream fileInStream = zipFile.getInputStream(zipFile.getEntry(fileTableEntry.getFileNameHash()));) { encryptedFile = ByteUtility.readFully(fileInStream); } fileBytes = TripleAES.decrypt(this.passphrase, encryptedFile); return fileBytes; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.rel.type; import org.apache.calcite.avatica.util.TimeUnit; import org.apache.calcite.sql.SqlCollation; import org.apache.calcite.sql.SqlIntervalQualifier; import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.calcite.sql.validate.SqlValidatorUtil; import org.checkerframework.checker.nullness.qual.Nullable; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; /** * RelDataTypeFactory is a factory for datatype descriptors. It defines methods * for instantiating and combining SQL, Java, and collection types. The factory * also provides methods for return type inference for arithmetic in cases where * SQL 2003 is implementation defined or impractical. * * <p>This interface is an example of the * {@link org.apache.calcite.util.Glossary#ABSTRACT_FACTORY_PATTERN abstract factory pattern}. * Any implementation of <code>RelDataTypeFactory</code> must ensure that type * objects are canonical: two types are equal if and only if they are * represented by the same Java object. This reduces memory consumption and * comparison cost. 
*/ public interface RelDataTypeFactory { //~ Methods ---------------------------------------------------------------- /** * Returns the type system. * * @return Type system */ RelDataTypeSystem getTypeSystem(); /** * Creates a type that corresponds to a Java class. * * @param clazz the Java class used to define the type * @return canonical Java type descriptor */ RelDataType createJavaType(Class clazz); /** * Creates a cartesian product type. * * @return canonical join type descriptor * @param types array of types to be joined */ RelDataType createJoinType(RelDataType... types); /** * Creates a type that represents a structured collection of fields, given * lists of the names and types of the fields. * * @param kind Name resolution policy * @param typeList types of the fields * @param fieldNameList names of the fields * @return canonical struct type descriptor */ RelDataType createStructType(StructKind kind, List<RelDataType> typeList, List<String> fieldNameList); /** Creates a type that represents a structured collection of fields. * Shorthand for <code>createStructType(StructKind.FULLY_QUALIFIED, typeList, * fieldNameList)</code>. */ RelDataType createStructType( List<RelDataType> typeList, List<String> fieldNameList); /** * Creates a type that represents a structured collection of fields, * obtaining the field information via a callback. * * @param fieldInfo callback for field information * @return canonical struct type descriptor */ @Deprecated // to be removed before 2.0 RelDataType createStructType(FieldInfo fieldInfo); /** * Creates a type that represents a structured collection of fieldList, * obtaining the field information from a list of (name, type) pairs. * * @param fieldList List of (name, type) pairs * @return canonical struct type descriptor */ RelDataType createStructType( List<? extends Map.Entry<String, RelDataType>> fieldList); /** * Creates an array type. Arrays are ordered collections of elements. 
* * @param elementType type of the elements of the array * @param maxCardinality maximum array size, or -1 for unlimited * @return canonical array type descriptor */ RelDataType createArrayType( RelDataType elementType, long maxCardinality); /** * Creates a map type. Maps are unordered collections of key/value pairs. * * @param keyType type of the keys of the map * @param valueType type of the values of the map * @return canonical map type descriptor */ RelDataType createMapType( RelDataType keyType, RelDataType valueType); /** * Creates a multiset type. Multisets are unordered collections of elements. * * @param elementType type of the elements of the multiset * @param maxCardinality maximum collection size, or -1 for unlimited * @return canonical multiset type descriptor */ RelDataType createMultisetType( RelDataType elementType, long maxCardinality); /** * Duplicates a type, making a deep copy. Normally, this is a no-op, since * canonical type objects are returned. However, it is useful when copying a * type from one factory to another. * * @param type input type * @return output type, a new object equivalent to input type */ RelDataType copyType(RelDataType type); /** * Creates a type that is the same as another type but with possibly * different nullability. The output type may be identical to the input * type. For type systems without a concept of nullability, the return value * is always the same as the input. * * @param type input type * @param nullable true to request a nullable type; false to request a NOT * NULL type * @return output type, same as input type except with specified nullability * @throws NullPointerException if type is null */ RelDataType createTypeWithNullability( RelDataType type, boolean nullable); /** * Creates a type that is the same as another type but with possibly * different charset or collation. For types without a concept of charset or * collation this function must throw an error. 
   *
   * @param type input type
   * @param charset charset to assign
   * @param collation collation to assign
   * @return output type, same as input type except with specified charset and
   * collation
   */
  RelDataType createTypeWithCharsetAndCollation(
      RelDataType type,
      Charset charset,
      SqlCollation collation);

  /** Returns the default {@link Charset} (valid if this is a string type). */
  Charset getDefaultCharset();

  /**
   * Returns the most general of a set of types (that is, one type to which
   * they can all be cast), or null if conversion is not possible. The result
   * may be a new type that is less restrictive than any of the input types,
   * e.g. <code>leastRestrictive(INT, NUMERIC(3, 2))</code> could be
   * {@code NUMERIC(12, 2)}.
   *
   * @param types input types to be combined using union (not null, not empty)
   * @return canonical union type descriptor
   */
  @Nullable RelDataType leastRestrictive(List<RelDataType> types);

  /**
   * Creates a SQL type with no precision or scale.
   *
   * @param typeName Name of the type, for example {@link SqlTypeName#BOOLEAN},
   * never null
   * @return canonical type descriptor
   */
  RelDataType createSqlType(SqlTypeName typeName);

  /**
   * Creates a SQL type that represents the "unknown" type.
   * It is only equal to itself, and is distinct from the NULL type.
   *
   * @return unknown type
   */
  RelDataType createUnknownType();

  /**
   * Creates a SQL type with length (precision) but no scale.
   *
   * @param typeName Name of the type, for example {@link SqlTypeName#VARCHAR}.
   * Never null.
   * @param precision Maximum length of the value (non-numeric types) or the
   * precision of the value (numeric/datetime types).
   * Must be non-negative or
   * {@link RelDataType#PRECISION_NOT_SPECIFIED}.
   * @return canonical type descriptor
   */
  RelDataType createSqlType(
      SqlTypeName typeName,
      int precision);

  /**
   * Creates a SQL type with precision and scale.
   *
   * @param typeName Name of the type, for example {@link SqlTypeName#DECIMAL}.
   * Never null.
   * @param precision Precision of the value.
   * Must be non-negative or
   * {@link RelDataType#PRECISION_NOT_SPECIFIED}.
   * @param scale scale of the values, i.e. the number of decimal places to
   * shift the value. For example, a NUMBER(10,3) value of
   * "123.45" is represented "123450" (that is, multiplied by
   * 10^3). A negative scale <em>is</em> valid.
   * @return canonical type descriptor
   */
  RelDataType createSqlType(
      SqlTypeName typeName,
      int precision,
      int scale);

  /**
   * Creates a SQL interval type.
   *
   * @param intervalQualifier contains information if it is a year-month or a
   * day-time interval along with precision information
   * @return canonical type descriptor
   */
  RelDataType createSqlIntervalType(
      SqlIntervalQualifier intervalQualifier);

  /**
   * Infers the return type of a decimal multiplication. Decimal
   * multiplication involves at least one decimal operand and requires both
   * operands to have exact numeric types.
   *
   * @param type1 type of the first operand
   * @param type2 type of the second operand
   * @return the result type for a decimal multiplication, or null if decimal
   * multiplication should not be applied to the operands.
   * @deprecated Use
   * {@link RelDataTypeSystem#deriveDecimalMultiplyType(RelDataTypeFactory, RelDataType, RelDataType)}
   */
  @Deprecated // to be removed before 2.0
  @Nullable RelDataType createDecimalProduct(
      RelDataType type1,
      RelDataType type2);

  /**
   * Returns whether a decimal multiplication should be implemented by casting
   * arguments to double values.
   *
   * <p>Pre-condition: <code>createDecimalProduct(type1, type2) != null</code>
   *
   * @deprecated Use
   * {@link RelDataTypeSystem#shouldUseDoubleMultiplication(RelDataTypeFactory, RelDataType, RelDataType)}
   */
  @Deprecated // to be removed before 2.0
  boolean useDoubleMultiplication(
      RelDataType type1,
      RelDataType type2);

  /**
   * Infers the return type of a decimal division. Decimal division involves
   * at least one decimal operand and requires both operands to have exact
   * numeric types.
   *
   * @param type1 type of the first operand
   * @param type2 type of the second operand
   * @return the result type for a decimal division, or null if decimal
   * division should not be applied to the operands.
   *
   * @deprecated Use
   * {@link RelDataTypeSystem#deriveDecimalDivideType(RelDataTypeFactory, RelDataType, RelDataType)}
   */
  @Deprecated // to be removed before 2.0
  @Nullable RelDataType createDecimalQuotient(
      RelDataType type1,
      RelDataType type2);

  /**
   * Create a decimal type equivalent to the numeric {@code type},
   * this is related to specific system implementation,
   * you can override this logic if it is required.
   *
   * @param type the numeric type to create decimal type with
   * @return decimal equivalence of the numeric type.
   */
  RelDataType decimalOf(RelDataType type);

  /**
   * Creates a
   * {@link org.apache.calcite.rel.type.RelDataTypeFactory.FieldInfoBuilder}.
   * But since {@code FieldInfoBuilder} is deprecated, we recommend that you use
   * its base class {@link Builder}, which is not deprecated.
   */
  @SuppressWarnings("deprecation")
  FieldInfoBuilder builder();

  //~ Inner Interfaces -------------------------------------------------------

  /**
   * Callback that provides enough information to create fields.
   */
  @Deprecated // to be removed before 2.0
  interface FieldInfo {
    /**
     * Returns the number of fields.
     *
     * @return number of fields
     */
    int getFieldCount();

    /**
     * Returns the name of a given field.
     *
     * @param index Ordinal of field
     * @return Name of given field
     */
    String getFieldName(int index);

    /**
     * Returns the type of a given field.
     *
     * @param index Ordinal of field
     * @return Type of given field
     */
    RelDataType getFieldType(int index);
  }

  /**
   * Implementation of {@link FieldInfo} that provides a fluid API to build
   * a list of fields.
*/ @Deprecated @SuppressWarnings("deprecation") class FieldInfoBuilder extends Builder implements FieldInfo { public FieldInfoBuilder(RelDataTypeFactory typeFactory) { super(typeFactory); } @Override public FieldInfoBuilder add(String name, RelDataType type) { return (FieldInfoBuilder) super.add(name, type); } @Override public FieldInfoBuilder add(String name, SqlTypeName typeName) { return (FieldInfoBuilder) super.add(name, typeName); } @Override public FieldInfoBuilder add(String name, SqlTypeName typeName, int precision) { return (FieldInfoBuilder) super.add(name, typeName, precision); } @Override public FieldInfoBuilder add(String name, SqlTypeName typeName, int precision, int scale) { return (FieldInfoBuilder) super.add(name, typeName, precision, scale); } @Override public FieldInfoBuilder add(String name, TimeUnit startUnit, int startPrecision, TimeUnit endUnit, int fractionalSecondPrecision) { return (FieldInfoBuilder) super.add(name, startUnit, startPrecision, endUnit, fractionalSecondPrecision); } @Override public FieldInfoBuilder nullable(boolean nullable) { return (FieldInfoBuilder) super.nullable(nullable); } @Override public FieldInfoBuilder add(RelDataTypeField field) { return (FieldInfoBuilder) super.add(field); } @Override public FieldInfoBuilder addAll( Iterable<? extends Map.Entry<String, RelDataType>> fields) { return (FieldInfoBuilder) super.addAll(fields); } @Override public FieldInfoBuilder kind(StructKind kind) { return (FieldInfoBuilder) super.kind(kind); } @Override public FieldInfoBuilder uniquify() { return (FieldInfoBuilder) super.uniquify(); } } /** Fluid API to build a list of fields. */ class Builder { private final List<String> names = new ArrayList<>(); private final List<RelDataType> types = new ArrayList<>(); private StructKind kind = StructKind.FULLY_QUALIFIED; private final RelDataTypeFactory typeFactory; private boolean nullableRecord = false; /** * Creates a Builder with the given type factory. 
*/ public Builder(RelDataTypeFactory typeFactory) { this.typeFactory = Objects.requireNonNull(typeFactory); } /** * Returns the number of fields. * * @return number of fields */ public int getFieldCount() { return names.size(); } /** * Returns the name of a given field. * * @param index Ordinal of field * @return Name of given field */ public String getFieldName(int index) { return names.get(index); } /** * Returns the type of a given field. * * @param index Ordinal of field * @return Type of given field */ public RelDataType getFieldType(int index) { return types.get(index); } /** * Adds a field with given name and type. */ public Builder add(String name, RelDataType type) { names.add(name); types.add(type); return this; } /** * Adds a field with a type created using * {@link org.apache.calcite.rel.type.RelDataTypeFactory#createSqlType(org.apache.calcite.sql.type.SqlTypeName)}. */ public Builder add(String name, SqlTypeName typeName) { add(name, typeFactory.createSqlType(typeName)); return this; } /** * Adds a field with a type created using * {@link org.apache.calcite.rel.type.RelDataTypeFactory#createSqlType(org.apache.calcite.sql.type.SqlTypeName, int)}. */ public Builder add(String name, SqlTypeName typeName, int precision) { add(name, typeFactory.createSqlType(typeName, precision)); return this; } /** * Adds a field with a type created using * {@link org.apache.calcite.rel.type.RelDataTypeFactory#createSqlType(org.apache.calcite.sql.type.SqlTypeName, int, int)}. */ public Builder add(String name, SqlTypeName typeName, int precision, int scale) { add(name, typeFactory.createSqlType(typeName, precision, scale)); return this; } /** * Adds a field with an interval type. 
*/ public Builder add(String name, TimeUnit startUnit, int startPrecision, TimeUnit endUnit, int fractionalSecondPrecision) { final SqlIntervalQualifier q = new SqlIntervalQualifier(startUnit, startPrecision, endUnit, fractionalSecondPrecision, SqlParserPos.ZERO); add(name, typeFactory.createSqlIntervalType(q)); return this; } /** * Changes the nullability of the last field added. * * @throws java.lang.IndexOutOfBoundsException if no fields have been * added */ public Builder nullable(boolean nullable) { RelDataType lastType = types.get(types.size() - 1); if (lastType.isNullable() != nullable) { final RelDataType type = typeFactory.createTypeWithNullability(lastType, nullable); types.set(types.size() - 1, type); } return this; } /** * Adds a field. Field's ordinal is ignored. */ public Builder add(RelDataTypeField field) { add(field.getName(), field.getType()); return this; } /** * Adds all fields in a collection. */ public Builder addAll( Iterable<? extends Map.Entry<String, RelDataType>> fields) { for (Map.Entry<String, RelDataType> field : fields) { add(field.getKey(), field.getValue()); } return this; } public Builder kind(StructKind kind) { this.kind = kind; return this; } /** Sets whether the record type will be nullable. */ public Builder nullableRecord(boolean nullableRecord) { this.nullableRecord = nullableRecord; return this; } /** * Makes sure that field names are unique. */ public Builder uniquify() { final List<String> uniqueNames = SqlValidatorUtil.uniquify(names, typeFactory.getTypeSystem().isSchemaCaseSensitive()); if (uniqueNames != names) { names.clear(); names.addAll(uniqueNames); } return this; } /** * Creates a struct type with the current contents of this builder. */ public RelDataType build() { return typeFactory.createTypeWithNullability( typeFactory.createStructType(kind, types, names), nullableRecord); } /** Creates a dynamic struct type with the current contents of this * builder. 
*/ public RelDataType buildDynamic() { final RelDataType dynamicType = new DynamicRecordTypeImpl(typeFactory); final RelDataType type = build(); dynamicType.getFieldList().addAll(type.getFieldList()); return dynamicType; } /** Returns whether a field exists with the given name. */ public boolean nameExists(String name) { return names.contains(name); } } }
/** * Appcelerator Titanium Mobile * Copyright (c) 2009-2013 by Appcelerator, Inc. All Rights Reserved. * Licensed under the terms of the Apache Public License * Please see the LICENSE included with this distribution for details. */ package org.appcelerator.titanium.proxy; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.appcelerator.kroll.KrollDict; import org.appcelerator.kroll.KrollProxy; import org.appcelerator.kroll.annotations.Kroll; import org.appcelerator.kroll.common.Log; import org.appcelerator.titanium.io.TiFileProvider; import org.appcelerator.titanium.TiApplication; import org.appcelerator.titanium.TiBlob; import org.appcelerator.titanium.TiC; import org.appcelerator.titanium.util.TiConvert; import android.content.ClipData; import android.graphics.Bitmap; import android.content.ComponentName; import android.content.Intent; import android.net.Uri; import android.os.Build; import android.text.TextUtils; @Kroll.proxy(propertyAccessors = { TiC.PROPERTY_URL }) /** * This is a proxy representation of the Android Intent type. * Refer to <a href="http://developer.android.com/reference/android/content/Intent.html">Android Intent</a> * for more details. 
*/ public class IntentProxy extends KrollProxy { private static final String TAG = "TiIntent"; public static final int TYPE_ACTIVITY = 0; public static final int TYPE_SERVICE = 1; public static final int TYPE_BROADCAST = 2; protected Intent intent; protected int type = TYPE_ACTIVITY; public IntentProxy() { } public IntentProxy(Intent intent) { this.intent = intent; } // clang-format off @Kroll.method @Kroll.getProperty public String getPackageName() // clang-format on { if (intent == null) { return null; } ComponentName componentName = intent.getComponent(); if (componentName != null) { return componentName.getPackageName(); } return null; } // clang-format off @Kroll.method @Kroll.getProperty public String getClassName() // clang-format on { if (intent == null) { return null; } ComponentName componentName = intent.getComponent(); if (componentName != null) { return componentName.getClassName(); } return null; } protected static char[] escapeChars = new char[] { '\\', '/', ' ', '.', '$', '&', '@' }; protected static String getURLClassName(String url, int type) { switch (type) { case TYPE_ACTIVITY: return getURLClassName(url, "Activity"); case TYPE_SERVICE: return getURLClassName(url, "Service"); case TYPE_BROADCAST: return getURLClassName(url, "Broadcast"); } return null; } protected static String getURLClassName(String url, String appendage) { List<String> parts = Arrays.asList(url.split("/")); if (parts.size() == 0) return null; int start = 0; if (parts.get(0).equals("app:") && parts.size() >= 3) { start = 2; } String className = TextUtils.join("_", parts.subList(start, parts.size())); if (className.endsWith(".js")) { className = className.substring(0, className.length() - 3); } if (className.length() > 1) { className = className.substring(0, 1).toUpperCase() + className.substring(1); } else { className = className.toUpperCase(); } for (char escapeChar : escapeChars) { className = className.replace(escapeChar, '_'); } return className + appendage; } public void 
handleCreationDict(KrollDict dict) { super.handleCreationDict(dict); intent = new Intent(); // See which set of options we have to work with. String action = dict.getString(TiC.PROPERTY_ACTION); String url = dict.getString(TiC.PROPERTY_URL); String data = dict.getString(TiC.PROPERTY_DATA); String className = dict.getString(TiC.PROPERTY_CLASS_NAME); String packageName = dict.getString(TiC.PROPERTY_PACKAGE_NAME); String type = dict.getString(TiC.PROPERTY_TYPE); int flags = 0; if (dict.containsKey(TiC.PROPERTY_FLAGS)) { flags = TiConvert.toInt(dict, TiC.PROPERTY_FLAGS); Log.d(TAG, "Setting flags: " + Integer.toString(flags), Log.DEBUG_MODE); intent.setFlags(flags); } else { setProperty(TiC.PROPERTY_FLAGS, intent.getFlags()); } if (action != null) { Log.d(TAG, "Setting action: " + action, Log.DEBUG_MODE); intent.setAction(action); } if (packageName != null) { Log.d(TAG, "Setting package: " + packageName, Log.DEBUG_MODE); intent.setPackage(packageName); } if (url != null) { Log.d(TAG, "Creating intent for JS Activity/Service @ " + url, Log.DEBUG_MODE); packageName = TiApplication.getInstance().getPackageName(); className = packageName + "." 
+ getURLClassName(url, this.type); } if (className != null) { if (packageName != null) { Log.d(TAG, "Both className and packageName set, using intent.setClassName(packageName, className", Log.DEBUG_MODE); intent.setClassName(packageName, className); } else { try { Class<?> c = getClass().getClassLoader().loadClass(className); intent.setClass(TiApplication.getInstance().getApplicationContext(), c); } catch (ClassNotFoundException e) { Log.e(TAG, "Unable to locate class for name: " + className); throw new IllegalStateException("Missing class for name: " + className, e); } } } if (type == null) { if (action != null && action.equals(Intent.ACTION_SEND)) { type = "text/plain"; } } // setType and setData are inexplicably intertwined // calling setType by itself clears the type and vice-versa // if you have both you _must_ call setDataAndType if (data != null) { Uri dataUri = null; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && data.startsWith("file://")) { intent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION); dataUri = TiFileProvider.createUriFrom(data); } else { dataUri = Uri.parse(data); } if (type != null) { Log.d(TAG, "setting type: " + type, Log.DEBUG_MODE); intent.setDataAndType(dataUri, type); } else { intent.setData(dataUri); } } else { intent.setType(type); } } @Kroll.method public void putExtra(String key, Object value) { if (value == null) { return; } if (value instanceof String) { intent.putExtra(key, (String) value); } else if (value instanceof Boolean) { intent.putExtra(key, (Boolean) value); } else if (value instanceof Double) { intent.putExtra(key, (Double) value); } else if (value instanceof Integer) { intent.putExtra(key, (Integer) value); } else if (value instanceof Long) { intent.putExtra(key, (Long) value); } else if (value instanceof IntentProxy) { intent.putExtra(key, (Intent) ((IntentProxy) value).getIntent()); } else if (value instanceof TiBlob) { intent.putExtra(key, ((TiBlob) value).getImage()); } else if (value instanceof Object[]) { 
try { Object[] objVal = (Object[]) value; String[] stringArray = Arrays.copyOf(objVal, objVal.length, String[].class); intent.putExtra(key, stringArray); } catch (Exception ex) { Log.e(TAG, "Error unimplemented put conversion ", ex.getMessage()); } } else { Log.w(TAG, "Warning unimplemented put conversion for " + value.getClass().getCanonicalName() + " trying String"); intent.putExtra(key, TiConvert.toString(value)); } } @Kroll.method public void addFlags(int flags) { intent.addFlags(flags); } // clang-format off @Kroll.method @Kroll.setProperty public void setFlags(int flags) // clang-format on { intent.setFlags(flags); } // clang-format off @Kroll.method @Kroll.getProperty public int getFlags() // clang-format on { return intent.getFlags(); } @Kroll.method public void putExtraUri(String key, Object value) { if (value == null) { return; } if (value instanceof String) { String extraString = (String) value; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && extraString.startsWith("file://")) { Uri contentUri = TiFileProvider.createUriFrom(extraString); ClipData clipData = ClipData.newRawUri("FILE", contentUri); intent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION); intent.setClipData(clipData); intent.putExtra(key, contentUri); } else { intent.putExtra(key, Uri.parse(extraString)); } } else if (value instanceof Object[]) { try { Object[] objVal = (Object[]) value; String[] stringArray = Arrays.copyOf(objVal, objVal.length, String[].class); ArrayList<Uri> imageUris = new ArrayList<Uri>(); ClipData clipData = null; for (String s : stringArray) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && s.startsWith("file://")) { Uri contentUri = TiFileProvider.createUriFrom(s); imageUris.add(contentUri); if (clipData == null) { clipData = ClipData.newRawUri("FILES", contentUri); } else { clipData.addItem(new ClipData.Item(contentUri)); } } else { imageUris.add(Uri.parse(s)); } } if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { 
intent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION); intent.setClipData(clipData); } intent.putParcelableArrayListExtra(key, imageUris); } catch (Exception ex) { Log.e(TAG, "Error unimplemented put conversion ", ex.getMessage()); } } } @Kroll.method public void addCategory(String category) { if (category != null) { Log.d(TAG, "Adding category: " + category, Log.DEBUG_MODE); intent.addCategory(category); } } @Kroll.method public String getStringExtra(String name) { if (!intent.hasExtra(name)) { return null; } String result = intent.getStringExtra(name); if (result == null) { // One more try as parcelable extra, such as when it's a Uri. // We can't really support getParcelableExtra(name) by itself, // since the type of object coming out of it is unknown and // might not make its way successfully over to Javascript. // By getting it as a string, we at least allow people to grab // Uris (Intent.STREAM) stored as parcelable extras, which is a // very common use case. Object parcelable = intent.getParcelableExtra(name); if (parcelable != null) { result = parcelable.toString(); } } return result; } @Kroll.method public boolean getBooleanExtra(String name, boolean defaultValue) { return intent.getBooleanExtra(name, defaultValue); } @Kroll.method public int getIntExtra(String name, int defaultValue) { return intent.getIntExtra(name, defaultValue); } @Kroll.method public long getLongExtra(String name, long defaultValue) { return intent.getLongExtra(name, defaultValue); } @Kroll.method public double getDoubleExtra(String name, double defaultValue) { return intent.getDoubleExtra(name, defaultValue); } @Kroll.method public TiBlob getBlobExtra(String name) { InputStream is = null; ByteArrayOutputStream bos = null; try { Object returnData = intent.getExtras().getParcelable(name); if (returnData instanceof Uri) { Uri uri = (Uri) returnData; is = TiApplication.getInstance().getContentResolver().openInputStream(uri); bos = new ByteArrayOutputStream(); int len; int size = 4096; 
byte[] buf = new byte[size]; while ((len = is.read(buf, 0, size)) != -1) { bos.write(buf, 0, len); } buf = bos.toByteArray(); return TiBlob.blobFromData(buf); } else if (returnData instanceof Bitmap) { Bitmap returnBitmapData = (Bitmap) returnData; return TiBlob.blobFromImage(returnBitmapData); } } catch (Exception e) { Log.e(TAG, "Error getting blob extra: " + e.getMessage(), e); return null; } finally { if (is != null) { try { is.close(); } catch (IOException e) { Log.e(TAG, e.getMessage(), Log.DEBUG_MODE); } } if (bos != null) { try { bos.close(); } catch (IOException e) { Log.e(TAG, e.getMessage(), Log.DEBUG_MODE); } } } return null; } // clang-format off @Kroll.method @Kroll.getProperty public String getData() // clang-format on { return intent.getDataString(); } /** * @return the associated intent. */ public Intent getIntent() { return intent; } // clang-format off @Kroll.method @Kroll.getProperty public String getType() // clang-format on { return intent.getType(); } // clang-format off @Kroll.method @Kroll.setProperty public void setType(String type) // clang-format on { intent.setType(type); } // clang-format off @Kroll.method @Kroll.getProperty public String getAction() // clang-format on { return intent.getAction(); } // clang-format off @Kroll.method @Kroll.setProperty public void setAction(String action) // clang-format on { intent.setAction(action); } /** * @return intent type for internal purposes (TYPE_ACTIVITY, etc.) */ public int getInternalType() { return type; } /** * Sets the intent type. * @param type the intent type for internal purposes (TYPE_ACTIVITY etc.) */ public void setInternalType(int type) { this.type = type; } @Kroll.method public boolean hasExtra(String name) { if (intent != null) { return intent.hasExtra(name); } return false; } @Override public String getApiName() { return "Ti.Android.Intent"; } }
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.cloudfront.model;

import java.io.Serializable;

/**
 * <p>
 * A complex type that specifies the whitelisted cookies, if any, that
 * you want CloudFront to forward to your origin that is associated with
 * this cache behavior.
 * </p>
 */
public class CookieNames implements Serializable, Cloneable {

    /** The number of whitelisted cookies for this cache behavior. */
    private Integer quantity;

    /**
     * Optional: A complex type that contains whitelisted cookies for this
     * cache behavior. If Quantity is 0, you can omit Items.
     */
    private com.amazonaws.internal.ListWithAutoConstructFlag<String> items;

    /**
     * Returns the number of whitelisted cookies for this cache behavior.
     *
     * @return The number of whitelisted cookies for this cache behavior.
     */
    public Integer getQuantity() {
        return quantity;
    }

    /**
     * Sets the number of whitelisted cookies for this cache behavior.
     *
     * @param quantity The number of whitelisted cookies for this cache behavior.
     */
    public void setQuantity(Integer quantity) {
        this.quantity = quantity;
    }

    /**
     * Sets the number of whitelisted cookies for this cache behavior.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param quantity The number of whitelisted cookies for this cache behavior.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public CookieNames withQuantity(Integer quantity) {
        this.quantity = quantity;
        return this;
    }

    /**
     * Returns the whitelisted cookies for this cache behavior, lazily
     * creating an empty auto-construct list on first access.
     *
     * @return Optional: A complex type that contains whitelisted cookies for this
     *         cache behavior. If Quantity is 0, you can omit Items.
     */
    public java.util.List<String> getItems() {
        if (items == null) {
            items = new com.amazonaws.internal.ListWithAutoConstructFlag<String>();
            items.setAutoConstruct(true);
        }
        return items;
    }

    /**
     * Sets the whitelisted cookies for this cache behavior; a defensive copy
     * of the collection is stored.
     *
     * @param items Optional: A complex type that contains whitelisted cookies for this
     *         cache behavior. If Quantity is 0, you can omit Items.
     */
    public void setItems(java.util.Collection<String> items) {
        if (items == null) {
            this.items = null;
            return;
        }
        com.amazonaws.internal.ListWithAutoConstructFlag<String> copy =
            new com.amazonaws.internal.ListWithAutoConstructFlag<String>(items.size());
        copy.addAll(items);
        this.items = copy;
    }

    /**
     * Appends the given values to the whitelisted cookies for this cache
     * behavior.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setItems(java.util.Collection)} or {@link
     * #withItems(java.util.Collection)} if you want to override the existing
     * values.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param items Optional: A complex type that contains whitelisted cookies for this
     *         cache behavior. If Quantity is 0, you can omit Items.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public CookieNames withItems(String... items) {
        if (getItems() == null) {
            setItems(new java.util.ArrayList<String>(items.length));
        }
        for (String value : items) {
            getItems().add(value);
        }
        return this;
    }

    /**
     * Replaces the whitelisted cookies for this cache behavior; a defensive
     * copy of the collection is stored.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param items Optional: A complex type that contains whitelisted cookies for this
     *         cache behavior. If Quantity is 0, you can omit Items.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public CookieNames withItems(java.util.Collection<String> items) {
        if (items == null) {
            this.items = null;
        } else {
            com.amazonaws.internal.ListWithAutoConstructFlag<String> copy =
                new com.amazonaws.internal.ListWithAutoConstructFlag<String>(items.size());
            copy.addAll(items);
            this.items = copy;
        }
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getQuantity() != null) {
            sb.append("Quantity: " + getQuantity() + ",");
        }
        if (getItems() != null) {
            sb.append("Items: " + getItems() );
        }
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getQuantity() == null) ? 0 : getQuantity().hashCode());
        hashCode = prime * hashCode + ((getItems() == null) ? 0 : getItems().hashCode());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (!(obj instanceof CookieNames)) {
            return false;
        }
        CookieNames other = (CookieNames) obj;

        // XOR catches the case where exactly one side is null.
        if (other.getQuantity() == null ^ this.getQuantity() == null) {
            return false;
        }
        if (other.getQuantity() != null && other.getQuantity().equals(this.getQuantity()) == false) {
            return false;
        }
        if (other.getItems() == null ^ this.getItems() == null) {
            return false;
        }
        if (other.getItems() != null && other.getItems().equals(this.getItems()) == false) {
            return false;
        }
        return true;
    }

    @Override
    public CookieNames clone() {
        try {
            return (CookieNames) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                "Got a CloneNotSupportedException from Object.clone() "
                    + "even though we're Cloneable!", e);
        }
    }
}
package de.tbressler.quadratum.logic.players;

import de.tbressler.quadratum.logic.ILogicCallback;
import de.tbressler.quadratum.model.IReadOnlyGameBoard;
import de.tbressler.quadratum.model.Player;

import java.util.Random;

import static com.google.common.base.MoreObjects.toStringHelper;
import static de.tbressler.quadratum.utils.GameBoardUtils.assertIndex;
import static de.tbressler.quadratum.utils.SquareUtils.getPossiblePieces;
import static de.tbressler.quadratum.utils.SquareUtils.score;
import static java.util.Objects.requireNonNull;

/**
 * The implementation of the player logic interface for artificial intelligence players (bots).
 *
 * <p>The bot scores every empty field of the 8x8 board (64 fields, index 0..63) with two
 * "heat maps" — one for its own square-building chances and one for the opponent's — and
 * then plays the field with the highest heat value.
 *
 * @author Tobias Bressler
 * @version 1.0
 */
public class BotPlayerLogic extends AbstractPlayerLogic {

    /** Enum for different bot strategies. */
    enum Strategy {
        /** Finds best index by adding all square scores to heat map. */
        LONG_TERM,
        /** Finds best index by using best square score for heat map. */
        SHORT_TERM
    }

    /* The strategy that should be used by the bot. */
    private final Strategy strategy;

    /* True if the moves of the player should be randomized. */
    private boolean randomizeMoves = true;

    /* Random number generator. */
    private Random random = new Random();

    /**
     * Creates the bot player logic.
     *
     * @param player The player, must not be null.
     * @param strategy The strategy, must not be null.
     */
    public BotPlayerLogic(Player player, Strategy strategy) {
        super(player);
        this.strategy = requireNonNull(strategy);
    }

    /**
     * Enables or disables randomization of moves. If randomization is enabled, the bot makes a random
     * decision which field he uses for his next move if the chance for a good score is the same.
     *
     * @param randomizeMoves True if moves should be randomized.
     */
    public void setRandomizeMoves(boolean randomizeMoves) {
        this.randomizeMoves = randomizeMoves;
    }

    /**
     * Sets the random number generator. This method should only be used for testing purposes.
     *
     * @param random The random number generator, must not be null.
     */
    void setRandom(Random random) {
        this.random = requireNonNull(random);
    }

    /**
     * Computes the bot's next move and reports it via the callback.
     *
     * @param gameBoard The current game board, must not be null.
     * @param callback The callback that receives the chosen move, must not be null.
     */
    @Override
    public void requestMove(IReadOnlyGameBoard gameBoard, ILogicCallback callback) {
        requireNonNull(gameBoard);
        requireNonNull(callback);

        // Per-field scores: how valuable each field is for the bot / the opponent.
        int[] playerHeatMap = new int[64];
        int[] opponentHeatMap = new int[64];

        // The four pieces of the candidate square: pieces[0]/[1] are the two anchor
        // fields (i, j), pieces[2]/[3] the two completing fields from getPossiblePieces().
        Player[] pieces = new Player[4];
        int[] possible;
        int scoreForSquare;
        int playerScore;
        int opponentScore;
        int numberOfPlayerPieces;
        int numberOfOpponentPieces;

        // Create heat maps for player and opponent:
        // NOTE(review): upper bound 55 presumably because the smallest index of any
        // square's pieces can never exceed 54 — TODO confirm against SquareUtils.
        for (int i = 0; i < 55; i++) {
            pieces[0] = gameBoard.getPiece(i);
            for (int j = i + 1; j < 64; j++) {
                pieces[1] = gameBoard.getPiece(j);

                // Fields that would complete a square with (i, j); skip if (i, j)
                // cannot anchor a square.
                possible = getPossiblePieces(i, j);
                if (possible.length != 2)
                    continue;

                pieces[2] = gameBoard.getPiece(possible[0]);
                pieces[3] = gameBoard.getPiece(possible[1]);

                // Count how many of the four fields each side already occupies.
                numberOfPlayerPieces = 0;
                numberOfOpponentPieces = 0;
                for (int p = 0; p < 4; p++)
                    if (pieces[p] == getPlayer())
                        numberOfPlayerPieces++;
                    else if (pieces[p] != null)
                        numberOfOpponentPieces++;

                // Calculate possible score of square:
                scoreForSquare = score(i, j, possible[0], possible[1]);

                if ((numberOfOpponentPieces > 0) && (numberOfPlayerPieces == 0)) {
                    // ... square is not occupied by opponent and not yet blocked by player.
                    // Calculate chance for opponent to get this square.
                    // Weight grows with the number of opponent pieces already placed.
                    opponentScore = scoreForSquare * (numberOfOpponentPieces+1);

                    // Update opponent heat map:
                    updateHeatMap(opponentHeatMap, i, opponentScore);
                    updateHeatMap(opponentHeatMap, j, opponentScore);
                    updateHeatMap(opponentHeatMap, possible[0], opponentScore);
                    updateHeatMap(opponentHeatMap, possible[1], opponentScore);

                } else if (numberOfOpponentPieces == 0) {
                    // ... square is not blocked by opponent.
                    // Calculate chance for player to get this square.
                    playerScore = scoreForSquare * (numberOfPlayerPieces+1);

                    // Update player heat map:
                    updateHeatMap(playerHeatMap, i, playerScore);
                    updateHeatMap(playerHeatMap, j, playerScore);
                    updateHeatMap(playerHeatMap, possible[0], playerScore);
                    updateHeatMap(playerHeatMap, possible[1], playerScore);
                }
            }
        }

        int value;
        int maxValue = -1;
        int indexWithMaxValue = -1;

        // Analyze heat map:
        for(int i = 0; i < 64; i++) {

            // Skip if field is not empty.
            if (!gameBoard.isFieldEmpty(i))
                continue;

            // Check chances to score:
            if (playerHeatMap[i] >= opponentHeatMap[i]) {
                // ... the chance for a player score is higher or equal.
                value = playerHeatMap[i];
            } else {
                // ... the chance for a opponent score is higher.
                value = opponentHeatMap[i];
            }

            // Check if chance is higher:
            // On a tie, doRandomization() gives a 50% chance to switch to the new
            // index, which randomizes between equally good moves.
            if ((value > maxValue) || ((value == maxValue) && doRandomization())) {
                maxValue = value;
                indexWithMaxValue = i;
            }
        }

        // Fails if no empty field was found (should not happen in a running game).
        assertIndex(indexWithMaxValue, "Bot logic error! Invalid field index.");

        callback.makeMove(indexWithMaxValue, getPlayer());
    }

    /* Updates the heat map at the given index with the score. */
    private void updateHeatMap(int[] heatMap, int index, int score) {
        heatMap[index] = calculateNewScore(heatMap[index], score);
    }

    /* Calculates the new score for the heat map.
     * LONG_TERM accumulates all square scores; SHORT_TERM keeps only the best. */
    private int calculateNewScore(int heatMapValue, int currentScore) {
        switch (strategy) {
            case LONG_TERM:
                return heatMapValue + currentScore;
            case SHORT_TERM:
                return (currentScore > heatMapValue) ? currentScore : heatMapValue;
            default:
                throw new IllegalStateException("Unknown strategy!");
        }
    }

    /* Returns true, if the values should be randomized. */
    private boolean doRandomization() {
        return randomizeMoves && random.nextBoolean();
    }

    @Override
    public String toString() {
        return toStringHelper(this)
                .add("strategy", strategy)
                .add("randomizeMoves", randomizeMoves)
                .toString();
    }
}
/*
 *
 * Autopsy Forensic Browser
 *
 * Copyright 2012-2019 Basis Technology Corp.
 *
 * Copyright 2012 42six Solutions.
 * Contact: aebadirad <at> 42six <dot> com
 * Project Contact/Architect: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.recentactivity;

import java.io.File;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
import org.sleuthkit.autopsy.ingest.IngestJobContext;

/**
 * Recent activity image ingest module. Runs a fixed, ordered pipeline of
 * Extract sub-modules (browser history, registry, OS artifacts, etc.) against
 * a single data source and posts progress/result messages to the ingest inbox.
 */
public final class RAImageIngestModule implements DataSourceIngestModule {

    private static final Logger logger = Logger.getLogger(RAImageIngestModule.class.getName());
    // All extractors, in the order they must run (order matters — see startUp()).
    private final List<Extract> extractors = new ArrayList<>();
    // Subset of extractors that are web-browser analyzers; used only for the
    // "history found / not found" summary message at the end of process().
    private final List<Extract> browserExtractors = new ArrayList<>();
    private IngestServices services = IngestServices.getInstance();
    private IngestJobContext context;
    // Accumulates per-extractor failure messages. NOTE(review): appended to in
    // process() but never read in this file — possibly dead state; confirm.
    private StringBuilder subCompleted = new StringBuilder();

    RAImageIngestModule() {
    }

    /**
     * Builds the extractor pipeline and initializes each extractor.
     *
     * @param context the ingest job context, cached for cancellation checks
     *
     * @throws IngestModuleException if there is no current case when the
     *                               IE/Edge extractors are constructed
     */
    @Override
    public void startUp(IngestJobContext context) throws IngestModuleException {
        this.context = context;

        Extract iexplore;
        Extract edge;
        try {
            // These two constructors can fail when no case is open.
            iexplore = new ExtractIE();
            edge = new ExtractEdge();
        } catch (NoCurrentCaseException ex) {
            throw new IngestModuleException(ex.getMessage(), ex);
        }

        Extract registry = new ExtractRegistry();
        Extract recentDocuments = new RecentDocumentsByLnk();
        Extract chrome = new Chrome();
        Extract firefox = new Firefox();
        Extract SEUQA = new SearchEngineURLQueryAnalyzer();
        Extract osExtract = new ExtractOs();
        Extract dataSourceAnalyzer = new DataSourceUsageAnalyzer();
        Extract safari = new ExtractSafari();
        Extract zoneInfo = new ExtractZoneIdentifier();
        Extract recycleBin = new ExtractRecycleBin();

        // Pipeline order is significant; the trailing comments document the
        // inter-module dependencies.
        extractors.add(chrome);
        extractors.add(firefox);
        extractors.add(iexplore);
        extractors.add(edge);
        extractors.add(safari);
        extractors.add(recentDocuments);
        extractors.add(SEUQA); // this needs to run after the web browser modules
        extractors.add(registry); // this should run after quicker modules like the browser modules and needs to run before the DataSourceUsageAnalyzer
        extractors.add(osExtract); // this needs to run before the DataSourceUsageAnalyzer
        extractors.add(dataSourceAnalyzer); //this needs to run after ExtractRegistry and ExtractOs
        extractors.add(zoneInfo); // this needs to run after the web browser modules
        extractors.add(recycleBin); // this needs to run after ExtractRegistry and ExtractOS

        browserExtractors.add(chrome);
        browserExtractors.add(firefox);
        browserExtractors.add(iexplore);
        browserExtractors.add(edge);
        browserExtractors.add(safari);

        for (Extract extractor : extractors) {
            extractor.init();
        }
    }

    /**
     * Runs every extractor against the data source, collecting per-extractor
     * errors, then posts two inbox messages (an error summary and a browser
     * "history found" summary) and calls complete() on each extractor.
     *
     * @param dataSource  the data source to analyze
     * @param progressBar determinate progress, one unit per extractor
     *
     * @return always ProcessResult.OK, even when individual extractors failed
     *         (their errors are reported via the inbox instead)
     */
    @Override
    public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
        services.postMessage(IngestMessage.createMessage(MessageType.INFO, RecentActivityExtracterModuleFactory.getModuleName(),
                NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.started", dataSource.getName())));

        progressBar.switchToDeterminate(extractors.size());
        ArrayList<String> errors = new ArrayList<>();

        for (int i = 0; i < extractors.size(); i++) {
            Extract extracter = extractors.get(i);
            // Cancellation is only checked between extractors, not inside one.
            if (context.dataSourceIngestIsCancelled()) {
                logger.log(Level.INFO, "Recent Activity has been canceled, quitting before {0}", extracter.getName()); //NON-NLS
                break;
            }

            progressBar.progress(extracter.getName(), i);

            try {
                extracter.process(dataSource, context, progressBar);
            } catch (Exception ex) {
                // One failed extractor must not abort the rest of the pipeline.
                logger.log(Level.SEVERE, "Exception occurred in " + extracter.getName(), ex); //NON-NLS
                subCompleted.append(NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errModFailed",
                        extracter.getName()));
                errors.add(
                        NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errModErrs", RecentActivityExtracterModuleFactory.getModuleName()));
            }
            progressBar.progress(i + 1);
            // Also surface non-fatal errors the extractor recorded itself.
            errors.addAll(extracter.getErrorMessages());
        }

        // create the final message for inbox
        StringBuilder errorMessage = new StringBuilder();
        String errorMsgSubject;
        MessageType msgLevel = MessageType.INFO;
        if (errors.isEmpty() == false) {
            msgLevel = MessageType.ERROR;
            // NOTE(review): the opening <ul> presumably lives inside the
            // errsEncountered bundle string, since only </ul> is appended here.
            errorMessage.append(
                    NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errMsg.errsEncountered"));
            for (String msg : errors) {
                errorMessage.append("<li>").append(msg).append("</li>\n"); //NON-NLS
            }
            errorMessage.append("</ul>\n"); //NON-NLS

            if (errors.size() == 1) {
                errorMsgSubject = NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errMsgSub.oneErr");
            } else {
                errorMsgSubject = NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errMsgSub.nErrs", errors.size());
            }
        } else {
            errorMessage.append(NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errMsg.noErrs"));
            errorMsgSubject = NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errMsgSub.noErrs");
        }
        final IngestMessage msg = IngestMessage.createMessage(msgLevel, RecentActivityExtracterModuleFactory.getModuleName(),
                NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.ingestMsg.finished",
                        dataSource.getName(), errorMsgSubject), errorMessage.toString());
        services.postMessage(msg);

        // Second inbox message: which browser extractors actually found data.
        StringBuilder historyMsg = new StringBuilder();
        historyMsg.append(
                NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.histMsg.title", dataSource.getName()));
        for (Extract module : browserExtractors) {
            historyMsg.append("<li>").append(module.getName()); //NON-NLS
            historyMsg.append(": ").append((module.foundData()) ? NbBundle
                    .getMessage(this.getClass(), "RAImageIngestModule.process.histMsg.found") : NbBundle
                    .getMessage(this.getClass(), "RAImageIngestModule.process.histMsg.notFnd"));
            historyMsg.append("</li>"); //NON-NLS
        }
        historyMsg.append("</ul>"); //NON-NLS
        final IngestMessage inboxMsg = IngestMessage.createMessage(MessageType.INFO, RecentActivityExtracterModuleFactory.getModuleName(),
                NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.ingestMsg.results", dataSource.getName()),
                historyMsg.toString());
        services.postMessage(inboxMsg);

        if (context.dataSourceIngestIsCancelled()) {
            return ProcessResult.OK;
        }

        // Give every extractor a chance to finish up, even if another fails.
        for (int i = 0; i < extractors.size(); i++) {
            Extract extracter = extractors.get(i);
            try {
                extracter.complete();
            } catch (Exception ex) {
                logger.log(Level.SEVERE, "Exception occurred when completing " + extracter.getName(), ex); //NON-NLS
                subCompleted.append(NbBundle.getMessage(this.getClass(), "RAImageIngestModule.complete.errMsg.failed",
                        extracter.getName()));
            }
        }

        return ProcessResult.OK;
    }

    /**
     * Get the temp path for a specific sub-module in recent activity. Will
     * create the dir if it doesn't exist.
     *
     * @param a_case Case that directory is for
     * @param mod Module name that will be used for a sub folder in the temp
     * folder to prevent name collisions
     *
     * @return Path to directory
     */
    protected static String getRATempPath(Case a_case, String mod) {
        String tmpDir = a_case.getTempDirectory() + File.separator + "RecentActivity" + File.separator + mod; //NON-NLS
        File dir = new File(tmpDir);
        if (dir.exists() == false) {
            dir.mkdirs();
        }
        return tmpDir;
    }

    /**
     * Get the output path for a specific sub-module in recent activity. Will
     * create the dir if it doesn't exist.
     *
     * @param a_case Case that directory is for
     * @param mod Module name that will be used for a sub folder in the temp
     * folder to prevent name collisions
     *
     * @return Path to directory
     */
    protected static String getRAOutputPath(Case a_case, String mod) {
        String tmpDir = a_case.getModuleDirectory() + File.separator + "RecentActivity" + File.separator + mod; //NON-NLS
        File dir = new File(tmpDir);
        if (dir.exists() == false) {
            dir.mkdirs();
        }
        return tmpDir;
    }

    /**
     * Get relative path for module output folder.
     *
     * @throws NoCurrentCaseException if there is no open case.
     * @return the relative path of the module output folder
     */
    static String getRelModuleOutputPath() throws NoCurrentCaseException {
        return Paths.get(Case.getCurrentCaseThrows().getModuleOutputDirectoryRelativePath(),
                "RecentActivity").normalize().toString(); //NON-NLS
    }
}
/* * Copyright (c) 2012-2015, b3log.org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.b3log.symphony.service; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.inject.Inject; import org.apache.commons.lang.ArrayUtils; import org.b3log.latke.Keys; import org.b3log.latke.logging.Level; import org.b3log.latke.logging.Logger; import org.b3log.latke.model.Pagination; import org.b3log.latke.model.User; import org.b3log.latke.repository.CompositeFilter; import org.b3log.latke.repository.CompositeFilterOperator; import org.b3log.latke.repository.Filter; import org.b3log.latke.repository.FilterOperator; import org.b3log.latke.repository.PropertyFilter; import org.b3log.latke.repository.Query; import org.b3log.latke.repository.RepositoryException; import org.b3log.latke.repository.SortDirection; import org.b3log.latke.service.ServiceException; import org.b3log.latke.service.annotation.Service; import org.b3log.latke.util.CollectionUtils; import org.b3log.latke.util.Paginator; import org.b3log.symphony.model.Common; import org.b3log.symphony.model.Tag; import org.b3log.symphony.repository.TagRepository; import org.b3log.symphony.repository.TagTagRepository; import org.b3log.symphony.repository.UserRepository; import org.b3log.symphony.repository.UserTagRepository; import org.b3log.symphony.util.Symphonys; import org.json.JSONArray; import 
org.json.JSONObject; /** * Tag query service. * * @author <a href="http://88250.b3log.org">Liang Ding</a> * @version 1.4.0.4, Jul 20, 2015 * @since 0.2.0 */ @Service public class TagQueryService { /** * Logger. */ private static final Logger LOGGER = Logger.getLogger(TagQueryService.class.getName()); /** * Tag repository. */ @Inject private TagRepository tagRepository; /** * User-Tag repository. */ @Inject private UserTagRepository userTagRepository; /** * Tag-Tag repository. */ @Inject private TagTagRepository tagTagRepository; /** * User repository. */ @Inject private UserRepository userRepository; /** * Avatar query service. */ @Inject private AvatarQueryService avatarQueryService; /** * Determines whether the specified tag title is reserved. * * @param tagTitle the specified tag title * @return {@code true} if it is reserved, otherwise returns {@code false} */ public boolean isReservedTag(final String tagTitle) { return ArrayUtils.contains(Symphonys.RESERVED_TAGS, tagTitle); } /** * Gets a tag by the specified tag title. * * @param tagTitle the specified tag title * @return tag, returns {@code null} if not null * @throws ServiceException service exception */ public JSONObject getTagByTitle(final String tagTitle) throws ServiceException { try { return tagRepository.getByTitle(tagTitle); } catch (final RepositoryException e) { LOGGER.log(Level.ERROR, "Gets tag [title=" + tagTitle + "] failed", e); throw new ServiceException(e); } } /** * Gets the trend (sort by reference count descending) tags. * * @param fetchSize the specified fetch size * @return trend tags, returns an empty list if not found * @throws ServiceException service exception */ public List<JSONObject> getTrendTags(final int fetchSize) throws ServiceException { final Query query = new Query().addSort(Tag.TAG_REFERENCE_CNT, SortDirection.DESCENDING). 
setCurrentPageNum(1).setPageSize(fetchSize).setPageCount(1); try { final JSONObject result = tagRepository.get(query); return CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS)); } catch (final RepositoryException e) { LOGGER.log(Level.ERROR, "Gets trend tags failed"); throw new ServiceException(e); } } /** * Gets the cold (sort by reference count ascending) tags. * * @param fetchSize the specified fetch size * @return trend tags, returns an empty list if not found * @throws ServiceException service exception */ public List<JSONObject> getColdTags(final int fetchSize) throws ServiceException { final Query query = new Query().addSort(Tag.TAG_REFERENCE_CNT, SortDirection.ASCENDING). setCurrentPageNum(1).setPageSize(fetchSize).setPageCount(1); try { final JSONObject result = tagRepository.get(query); return CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS)); } catch (final RepositoryException e) { LOGGER.log(Level.ERROR, "Gets cold tags failed", e); throw new ServiceException(e); } } /** * Gets the tags the specified fetch size. * * @param fetchSize the specified fetch size * @return tags, returns an empty list if not found * @throws ServiceException service exception */ public List<JSONObject> getTags(final int fetchSize) throws ServiceException { final Query query = new Query().addSort(" RAND() " /* MySQL navtive dialect here*/, SortDirection.ASCENDING). setFilter(new PropertyFilter(Tag.TAG_ICON_PATH, FilterOperator.NOT_EQUAL, "")). setPageCount(1).setPageSize(fetchSize); try { final JSONObject result = tagRepository.get(query); return CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS)); } catch (final RepositoryException e) { LOGGER.log(Level.ERROR, "Gets tags failed", e); throw new ServiceException(e); } } /** * Gets the creator of the specified tag of the given tag id. 
* * @param tagId the given tag id * @return tag creator, for example, <pre> * { * "tagCreatorThumbnailURL": "", * "tagCreatorName": "" * } * </pre>, returns {@code null} if not found * * @throws ServiceException service exception */ public JSONObject getCreator(final String tagId) throws ServiceException { final List<Filter> filters = new ArrayList<Filter>(); filters.add(new PropertyFilter(Tag.TAG + '_' + Keys.OBJECT_ID, FilterOperator.EQUAL, tagId)); filters.add(new PropertyFilter(Common.TYPE, FilterOperator.EQUAL, 0)); final Query query = new Query().setCurrentPageNum(1).setPageSize(1).setPageCount(1). setFilter(new CompositeFilter(CompositeFilterOperator.AND, filters)); try { final JSONObject result = userTagRepository.get(query); final JSONArray results = result.optJSONArray(Keys.RESULTS); final JSONObject creatorTagRelation = results.optJSONObject(0); final String creatorId = creatorTagRelation.optString(User.USER + '_' + Keys.OBJECT_ID); final JSONObject creator = userRepository.get(creatorId); final String creatorEmail = creator.optString(User.USER_EMAIL); final String thumbnailURL = avatarQueryService.getAvatarURL(creatorEmail); final JSONObject ret = new JSONObject(); ret.put(Tag.TAG_T_CREATOR_THUMBNAIL_URL, thumbnailURL); ret.put(Tag.TAG_T_CREATOR_NAME, creator.optString(User.USER_NAME)); return ret; } catch (final RepositoryException e) { LOGGER.log(Level.ERROR, "Gets tag creator failed", e); throw new ServiceException(e); } } /** * Gets the participants (article ref) of the specified tag of the given tag id. * * @param tagId the given tag id * @param fetchSize the specified fetch size * @return tag participants, for example, <pre> * [ * { * "tagParticipantName": "", * "tagParticipantThumbnailURL": "" * }, .... 
* ] * </pre>, returns an empty list if not found * * @throws ServiceException service exception */ public List<JSONObject> getParticipants(final String tagId, final int fetchSize) throws ServiceException { final List<Filter> filters = new ArrayList<Filter>(); filters.add(new PropertyFilter(Tag.TAG + '_' + Keys.OBJECT_ID, FilterOperator.EQUAL, tagId)); filters.add(new PropertyFilter(Common.TYPE, FilterOperator.EQUAL, 1)); Query query = new Query().setCurrentPageNum(1).setPageSize(fetchSize).setPageCount(1). setFilter(new CompositeFilter(CompositeFilterOperator.AND, filters)); final List<JSONObject> ret = new ArrayList<JSONObject>(); try { JSONObject result = userTagRepository.get(query); final JSONArray userTagRelations = result.optJSONArray(Keys.RESULTS); final Set<String> userIds = new HashSet<String>(); for (int i = 0; i < userTagRelations.length(); i++) { userIds.add(userTagRelations.optJSONObject(i).optString(User.USER + '_' + Keys.OBJECT_ID)); } query = new Query().setFilter(new PropertyFilter(Keys.OBJECT_ID, FilterOperator.IN, userIds)); result = userRepository.get(query); final List<JSONObject> users = CollectionUtils.<JSONObject>jsonArrayToList(result.optJSONArray(Keys.RESULTS)); for (final JSONObject user : users) { final JSONObject participant = new JSONObject(); participant.put(Tag.TAG_T_PARTICIPANT_NAME, user.optString(User.USER_NAME)); final String thumbnailURL = avatarQueryService.getAvatarURL(user.optString(User.USER_EMAIL)); participant.put(Tag.TAG_T_PARTICIPANT_THUMBNAIL_URL, thumbnailURL); ret.add(participant); } return ret; } catch (final RepositoryException e) { LOGGER.log(Level.ERROR, "Gets tag participants failed", e); throw new ServiceException(e); } } /** * Gets the related tags of the specified tag of the given tag id. * * @param tagId the given tag id * @param fetchSize the specified fetch size * @return related tags, for example, <pre> * [{ * "oId": "", * "tagTitle": "", * "tagDescription": "", * .... * }, ....] 
* </pre>, returns an empty list if not found * * @throws ServiceException service exception */ public List<JSONObject> getRelatedTags(final String tagId, final int fetchSize) throws ServiceException { final List<JSONObject> ret = new ArrayList<JSONObject>(); final Set<String> tagIds = new HashSet<String>(); try { JSONObject result = tagTagRepository.getByTag1Id(tagId, 1, fetchSize); JSONArray relations = result.optJSONArray(Keys.RESULTS); boolean full = false; for (int i = 0; i < relations.length(); i++) { tagIds.add(relations.optJSONObject(i).optString(Tag.TAG + "2_" + Keys.OBJECT_ID)); if (tagIds.size() >= fetchSize) { full = true; break; } } if (!full) { result = tagTagRepository.getByTag2Id(tagId, 1, fetchSize); relations = result.optJSONArray(Keys.RESULTS); for (int i = 0; i < relations.length(); i++) { tagIds.add(relations.optJSONObject(i).optString(Tag.TAG + "1_" + Keys.OBJECT_ID)); if (tagIds.size() >= fetchSize) { break; } } } final Map<String, JSONObject> tags = tagRepository.get(tagIds); final Collection<JSONObject> values = tags.values(); ret.addAll(values); return ret; } catch (final RepositoryException e) { LOGGER.log(Level.ERROR, "Gets related tags failed", e); throw new ServiceException(e); } } /** * Gets tags by the specified request json object. * * @param requestJSONObject the specified request json object, for example, <pre> * { * "tagTitle": "", // optional * "paginationCurrentPageNum": 1, * "paginationPageSize": 20, * "paginationWindowSize": 10 * }, see {@link Pagination} for more details * </pre> * * @param tagFields the specified tag fields to return * * @return for example, <pre> * { * "pagination": { * "paginationPageCount": 100, * "paginationPageNums": [1, 2, 3, 4, 5] * }, * "tags": [{ * "oId": "", * "tagTitle": "", * "tagDescription": "", * .... * }, ....] 
* } * </pre> * * @throws ServiceException service exception * @see Pagination */ public JSONObject getTags(final JSONObject requestJSONObject, final Map<String, Class<?>> tagFields) throws ServiceException { final JSONObject ret = new JSONObject(); final int currentPageNum = requestJSONObject.optInt(Pagination.PAGINATION_CURRENT_PAGE_NUM); final int pageSize = requestJSONObject.optInt(Pagination.PAGINATION_PAGE_SIZE); final int windowSize = requestJSONObject.optInt(Pagination.PAGINATION_WINDOW_SIZE); final Query query = new Query().setCurrentPageNum(currentPageNum).setPageSize(pageSize). addSort(Keys.OBJECT_ID, SortDirection.DESCENDING); for (final Map.Entry<String, Class<?>> tagField : tagFields.entrySet()) { query.addProjection(tagField.getKey(), tagField.getValue()); } if (requestJSONObject.has(Tag.TAG_TITLE)) { query.setFilter(new PropertyFilter(Tag.TAG_TITLE, FilterOperator.EQUAL, requestJSONObject.optString(Tag.TAG_TITLE))); } JSONObject result = null; try { result = tagRepository.get(query); } catch (final RepositoryException e) { LOGGER.log(Level.ERROR, "Gets tags failed", e); throw new ServiceException(e); } final int pageCount = result.optJSONObject(Pagination.PAGINATION).optInt(Pagination.PAGINATION_PAGE_COUNT); final JSONObject pagination = new JSONObject(); ret.put(Pagination.PAGINATION, pagination); final List<Integer> pageNums = Paginator.paginate(currentPageNum, pageSize, pageCount, windowSize); pagination.put(Pagination.PAGINATION_PAGE_COUNT, pageCount); pagination.put(Pagination.PAGINATION_PAGE_NUMS, pageNums); final JSONArray data = result.optJSONArray(Keys.RESULTS); final List<JSONObject> tags = CollectionUtils.<JSONObject>jsonArrayToList(data); for (final JSONObject tag : tags) { tag.put(Tag.TAG_T_CREATE_TIME, new Date(tag.optLong(Keys.OBJECT_ID))); } ret.put(Tag.TAGS, tags); return ret; } /** * Gets a tag by the specified id. 
* * @param tagId the specified id * @return tag, return {@code null} if not found * @throws ServiceException service exception */ public JSONObject getTag(final String tagId) throws ServiceException { try { final JSONObject ret = tagRepository.get(tagId); if (null == ret) { return null; } return ret; } catch (final RepositoryException e) { LOGGER.log(Level.ERROR, "Gets a tag [tagId=" + tagId + "] failed", e); throw new ServiceException(e); } } }
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts; import java.util.Collections; import java.util.List; import org.eclipse.draw2d.IFigure; import org.eclipse.draw2d.Label; import org.eclipse.draw2d.geometry.Point; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.transaction.RunnableWithResult; import org.eclipse.gef.AccessibleEditPart; import org.eclipse.gef.EditPolicy; import org.eclipse.gef.Request; import org.eclipse.gef.requests.DirectEditRequest; import org.eclipse.gef.tools.DirectEditManager; import org.eclipse.gmf.runtime.common.ui.services.parser.IParser; import org.eclipse.gmf.runtime.common.ui.services.parser.IParserEditStatus; import org.eclipse.gmf.runtime.common.ui.services.parser.ParserEditStatus; import org.eclipse.gmf.runtime.common.ui.services.parser.ParserOptions; import org.eclipse.gmf.runtime.diagram.ui.editparts.CompartmentEditPart; import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart; import org.eclipse.gmf.runtime.diagram.ui.editparts.ITextAwareEditPart; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.LabelDirectEditPolicy; import org.eclipse.gmf.runtime.diagram.ui.l10n.DiagramColorRegistry; import org.eclipse.gmf.runtime.diagram.ui.label.ILabelDelegate; import org.eclipse.gmf.runtime.diagram.ui.label.WrappingLabelDelegate; import org.eclipse.gmf.runtime.diagram.ui.requests.RequestConstants; import org.eclipse.gmf.runtime.diagram.ui.tools.TextDirectEditManager; import org.eclipse.gmf.runtime.draw2d.ui.figures.WrappingLabel; import org.eclipse.gmf.runtime.emf.core.util.EObjectAdapter; import org.eclipse.gmf.runtime.emf.ui.services.parser.ISemanticParser; import org.eclipse.gmf.runtime.notation.FontStyle; import org.eclipse.gmf.runtime.notation.NotationPackage; import org.eclipse.gmf.runtime.notation.View; import org.eclipse.gmf.tooling.runtime.directedit.TextDirectEditManager2; import 
org.eclipse.gmf.tooling.runtime.draw2d.labels.SimpleLabelDelegate; import org.eclipse.gmf.tooling.runtime.edit.policies.DefaultNodeLabelDragPolicy; import org.eclipse.gmf.tooling.runtime.edit.policies.labels.IRefreshableFeedbackEditPolicy; import org.eclipse.jface.text.contentassist.IContentAssistProcessor; import org.eclipse.jface.viewers.ICellEditorValidator; import org.eclipse.swt.SWT; import org.eclipse.swt.accessibility.AccessibleEvent; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.graphics.FontData; import org.eclipse.swt.graphics.Image; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.EsbTextSelectionEditPolicy; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbParserProvider; /** * @generated */ public class LoadBalanceEndPointEndPointName2EditPart extends CompartmentEditPart implements ITextAwareEditPart { /** * @generated */ public static final int VISUAL_ID = 5164; /** * @generated */ private DirectEditManager manager; /** * @generated */ private IParser parser; /** * @generated */ private List<?> parserElements; /** * @generated */ private String defaultText; /** * @generated */ private ILabelDelegate labelDelegate; /** * @generated */ public LoadBalanceEndPointEndPointName2EditPart(View view) { super(view); } /** * @generated */ protected void createDefaultEditPolicies() { super.createDefaultEditPolicies(); installEditPolicy(EditPolicy.SELECTION_FEEDBACK_ROLE, new EsbTextSelectionEditPolicy()); installEditPolicy(EditPolicy.DIRECT_EDIT_ROLE, new LabelDirectEditPolicy()); installEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE, new DefaultNodeLabelDragPolicy()); } /** * @generated */ protected String getLabelTextHelper(IFigure figure) { if (figure instanceof WrappingLabel) { return ((WrappingLabel) figure).getText(); } else if (figure instanceof 
Label) { return ((Label) figure).getText(); } else { return getLabelDelegate().getText(); } } /** * @generated */ protected void setLabelTextHelper(IFigure figure, String text) { if (figure instanceof WrappingLabel) { ((WrappingLabel) figure).setText(text); } else if (figure instanceof Label) { ((Label) figure).setText(text); } else { getLabelDelegate().setText(text); } } /** * @generated */ protected Image getLabelIconHelper(IFigure figure) { if (figure instanceof WrappingLabel) { return ((WrappingLabel) figure).getIcon(); } else if (figure instanceof Label) { return ((Label) figure).getIcon(); } else { return getLabelDelegate().getIcon(0); } } /** * @generated */ protected void setLabelIconHelper(IFigure figure, Image icon) { if (figure instanceof WrappingLabel) { ((WrappingLabel) figure).setIcon(icon); return; } else if (figure instanceof Label) { ((Label) figure).setIcon(icon); return; } else { getLabelDelegate().setIcon(icon, 0); } } /** * @generated */ public void setLabel(WrappingLabel figure) { unregisterVisuals(); setFigure(figure); defaultText = getLabelTextHelper(figure); registerVisuals(); refreshVisuals(); } /** * @generated */ @SuppressWarnings("rawtypes") protected List getModelChildren() { return Collections.EMPTY_LIST; } /** * @generated */ public IGraphicalEditPart getChildBySemanticHint(String semanticHint) { return null; } /** * @generated */ protected EObject getParserElement() { return resolveSemanticElement(); } /** * @generated */ protected Image getLabelIcon() { return null; } /** * @generated */ protected String getLabelText() { String text = null; EObject parserElement = getParserElement(); if (parserElement != null && getParser() != null) { text = getParser().getPrintString(new EObjectAdapter(parserElement), getParserOptions().intValue()); } if (text == null || text.length() == 0) { text = defaultText; } return text; } /** * @generated */ public void setLabelText(String text) { setLabelTextHelper(getFigure(), text); 
refreshSelectionFeedback(); } /** * @generated */ public String getEditText() { if (getParserElement() == null || getParser() == null) { return ""; //$NON-NLS-1$ } return getParser().getEditString(new EObjectAdapter(getParserElement()), getParserOptions().intValue()); } /** * @generated */ protected boolean isEditable() { return false; } /** * @generated */ public ICellEditorValidator getEditTextValidator() { return new ICellEditorValidator() { public String isValid(final Object value) { if (value instanceof String) { final EObject element = getParserElement(); final IParser parser = getParser(); try { IParserEditStatus valid = (IParserEditStatus) getEditingDomain().runExclusive( new RunnableWithResult.Impl<IParserEditStatus>() { public void run() { setResult(parser.isValidEditString(new EObjectAdapter(element), (String) value)); } }); return valid.getCode() == ParserEditStatus.EDITABLE ? null : valid.getMessage(); } catch (InterruptedException ie) { ie.printStackTrace(); } } // shouldn't get here return null; } }; } /** * @generated */ public IContentAssistProcessor getCompletionProcessor() { if (getParserElement() == null || getParser() == null) { return null; } return getParser().getCompletionProcessor(new EObjectAdapter(getParserElement())); } /** * @generated */ public ParserOptions getParserOptions() { return ParserOptions.NONE; } /** * @generated */ public IParser getParser() { if (parser == null) { parser = EsbParserProvider .getParser( EsbElementTypes.LoadBalanceEndPoint_3656, getParserElement(), EsbVisualIDRegistry .getType(org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoadBalanceEndPointEndPointName2EditPart.VISUAL_ID)); } return parser; } /** * @generated */ protected DirectEditManager getManager() { if (manager == null) { setManager(new TextDirectEditManager(this, null, EsbEditPartFactory.getTextCellEditorLocator(this))); } return manager; } /** * @generated */ protected void setManager(DirectEditManager manager) { this.manager = manager; 
} /** * @generated */ protected void performDirectEdit() { getManager().show(); } /** * @generated */ protected void performDirectEdit(Point eventLocation) { if (getManager().getClass() == TextDirectEditManager.class) { ((TextDirectEditManager) getManager()).show(eventLocation.getSWTPoint()); } } /** * @generated */ private void performDirectEdit(char initialCharacter) { if (getManager() instanceof TextDirectEditManager) { ((TextDirectEditManager) getManager()).show(initialCharacter); } else // { performDirectEdit(); } } /** * @generated */ protected void performDirectEditRequest(Request request) { final Request theRequest = request; try { getEditingDomain().runExclusive(new Runnable() { public void run() { if (isActive() && isEditable()) { if (theRequest.getExtendedData().get(RequestConstants.REQ_DIRECTEDIT_EXTENDEDDATA_INITIAL_CHAR) instanceof Character) { Character initialChar = (Character) theRequest.getExtendedData().get( RequestConstants.REQ_DIRECTEDIT_EXTENDEDDATA_INITIAL_CHAR); performDirectEdit(initialChar.charValue()); } else if ((theRequest instanceof DirectEditRequest) && (getEditText().equals(getLabelText()))) { DirectEditRequest editRequest = (DirectEditRequest) theRequest; performDirectEdit(editRequest.getLocation()); } else { performDirectEdit(); } } } }); } catch (InterruptedException e) { e.printStackTrace(); } } /** * @generated */ protected void refreshVisuals() { super.refreshVisuals(); refreshLabel(); refreshFont(); refreshFontColor(); refreshUnderline(); refreshStrikeThrough(); } /** * @generated */ protected void refreshLabel() { setLabelTextHelper(getFigure(), getLabelText()); setLabelIconHelper(getFigure(), getLabelIcon()); refreshSelectionFeedback(); } /** * @generated */ protected void refreshUnderline() { FontStyle style = (FontStyle) getFontStyleOwnerView().getStyle(NotationPackage.eINSTANCE.getFontStyle()); if (style != null && getFigure() instanceof WrappingLabel) { ((WrappingLabel) getFigure()).setTextUnderline(style.isUnderline()); 
} } /** * @generated */ protected void refreshStrikeThrough() { FontStyle style = (FontStyle) getFontStyleOwnerView().getStyle(NotationPackage.eINSTANCE.getFontStyle()); if (style != null && getFigure() instanceof WrappingLabel) { ((WrappingLabel) getFigure()).setTextStrikeThrough(style.isStrikeThrough()); } } /** * @generated */ protected void refreshFont() { FontStyle style = (FontStyle) getFontStyleOwnerView().getStyle(NotationPackage.eINSTANCE.getFontStyle()); if (style != null) { FontData fontData = new FontData(style.getFontName(), style.getFontHeight(), (style.isBold() ? SWT.BOLD : SWT.NORMAL) | (style.isItalic() ? SWT.ITALIC : SWT.NORMAL)); setFont(fontData); } } /** * @generated */ private void refreshSelectionFeedback() { requestEditPolicyFeedbackRefresh(EditPolicy.PRIMARY_DRAG_ROLE); requestEditPolicyFeedbackRefresh(EditPolicy.SELECTION_FEEDBACK_ROLE); } /** * @generated */ private void requestEditPolicyFeedbackRefresh(String editPolicyKey) { Object editPolicy = getEditPolicy(editPolicyKey); if (editPolicy instanceof IRefreshableFeedbackEditPolicy) { ((IRefreshableFeedbackEditPolicy) editPolicy).refreshFeedback(); } } /** * @generated */ protected void setFontColor(Color color) { getFigure().setForegroundColor(color); } /** * @generated */ protected void addSemanticListeners() { if (getParser() instanceof ISemanticParser) { EObject element = resolveSemanticElement(); parserElements = ((ISemanticParser) getParser()).getSemanticElementsBeingParsed(element); for (int i = 0; i < parserElements.size(); i++) { addListenerFilter("SemanticModel" + i, this, (EObject) parserElements.get(i)); //$NON-NLS-1$ } } else { super.addSemanticListeners(); } } /** * @generated */ protected void removeSemanticListeners() { if (parserElements != null) { for (int i = 0; i < parserElements.size(); i++) { removeListenerFilter("SemanticModel" + i); //$NON-NLS-1$ } } else { super.removeSemanticListeners(); } } /** * @generated */ protected AccessibleEditPart getAccessibleEditPart() 
{ if (accessibleEP == null) { accessibleEP = new AccessibleGraphicalEditPart() { public void getName(AccessibleEvent e) { e.result = getLabelTextHelper(getFigure()); } }; } return accessibleEP; } /** * @generated */ private View getFontStyleOwnerView() { return getPrimaryView(); } /** * @generated */ private ILabelDelegate getLabelDelegate() { if (labelDelegate == null) { IFigure label = getFigure(); if (label instanceof WrappingLabel) { labelDelegate = new WrappingLabelDelegate((WrappingLabel) label); } else { labelDelegate = new SimpleLabelDelegate((Label) label); } } return labelDelegate; } /** * @generated */ @Override public Object getAdapter(Class key) { if (ILabelDelegate.class.equals(key)) { return getLabelDelegate(); } return super.getAdapter(key); } /** * @generated */ protected void addNotationalListeners() { super.addNotationalListeners(); addListenerFilter("PrimaryView", this, getPrimaryView()); //$NON-NLS-1$ } /** * @generated */ protected void removeNotationalListeners() { super.removeNotationalListeners(); removeListenerFilter("PrimaryView"); //$NON-NLS-1$ } /** * @generated */ protected void handleNotificationEvent(Notification event) { Object feature = event.getFeature(); if (NotationPackage.eINSTANCE.getFontStyle_FontColor().equals(feature)) { Integer c = (Integer) event.getNewValue(); setFontColor(DiagramColorRegistry.getInstance().getColor(c)); } else if (NotationPackage.eINSTANCE.getFontStyle_Underline().equals(feature)) { refreshUnderline(); } else if (NotationPackage.eINSTANCE.getFontStyle_StrikeThrough().equals(feature)) { refreshStrikeThrough(); } else if (NotationPackage.eINSTANCE.getFontStyle_FontHeight().equals(feature) || NotationPackage.eINSTANCE.getFontStyle_FontName().equals(feature) || NotationPackage.eINSTANCE.getFontStyle_Bold().equals(feature) || NotationPackage.eINSTANCE.getFontStyle_Italic().equals(feature)) { refreshFont(); } else { if (getParser() != null && getParser().isAffectingEvent(event, getParserOptions().intValue())) 
{ refreshLabel(); } if (getParser() instanceof ISemanticParser) { ISemanticParser modelParser = (ISemanticParser) getParser(); if (modelParser.areSemanticElementsAffected(null, event)) { removeSemanticListeners(); if (resolveSemanticElement() != null) { addSemanticListeners(); } refreshLabel(); } } } super.handleNotificationEvent(event); } /** * @generated */ protected IFigure createFigure() { // Parent should assign one using setLabel() method return null; } }/*package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts; import java.util.Collections; import java.util.List; import org.eclipse.draw2d.IFigure; import org.eclipse.draw2d.Label; import org.eclipse.draw2d.geometry.Point; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.transaction.RunnableWithResult; import org.eclipse.gef.AccessibleEditPart; import org.eclipse.gef.EditPolicy; import org.eclipse.gef.Request; import org.eclipse.gef.requests.DirectEditRequest; import org.eclipse.gef.tools.DirectEditManager; import org.eclipse.gmf.runtime.common.ui.services.parser.IParser; import org.eclipse.gmf.runtime.common.ui.services.parser.IParserEditStatus; import org.eclipse.gmf.runtime.common.ui.services.parser.ParserEditStatus; import org.eclipse.gmf.runtime.common.ui.services.parser.ParserOptions; import org.eclipse.gmf.runtime.diagram.ui.editparts.CompartmentEditPart; import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart; import org.eclipse.gmf.runtime.diagram.ui.editparts.ITextAwareEditPart; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.LabelDirectEditPolicy; import org.eclipse.gmf.runtime.diagram.ui.l10n.DiagramColorRegistry; import org.eclipse.gmf.runtime.diagram.ui.requests.RequestConstants; import org.eclipse.gmf.runtime.diagram.ui.tools.TextDirectEditManager; import org.eclipse.gmf.runtime.draw2d.ui.figures.WrappingLabel; import org.eclipse.gmf.runtime.emf.core.util.EObjectAdapter; import 
org.eclipse.gmf.runtime.emf.ui.services.parser.ISemanticParser; import org.eclipse.gmf.runtime.notation.FontStyle; import org.eclipse.gmf.runtime.notation.NotationPackage; import org.eclipse.gmf.runtime.notation.View; import org.eclipse.jface.text.contentassist.IContentAssistProcessor; import org.eclipse.jface.viewers.ICellEditorValidator; import org.eclipse.swt.SWT; import org.eclipse.swt.accessibility.AccessibleEvent; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.graphics.FontData; import org.eclipse.swt.graphics.Image; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.EsbTextSelectionEditPolicy; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbParserProvider; */ /** * @generated */ /* public class LoadBalanceEndPointEndPointName2EditPart extends CompartmentEditPart implements ITextAwareEditPart { *//** * @generated */ /* public static final int VISUAL_ID = 5103; *//** * @generated */ /* private DirectEditManager manager; *//** * @generated */ /* private IParser parser; *//** * @generated */ /* private List<?> parserElements; *//** * @generated */ /* private String defaultText; *//** * @generated */ /* public LoadBalanceEndPointEndPointName2EditPart(View view) { super(view); } *//** * @generated */ /* protected void createDefaultEditPolicies() { super.createDefaultEditPolicies(); installEditPolicy(EditPolicy.SELECTION_FEEDBACK_ROLE, new EsbTextSelectionEditPolicy()); installEditPolicy(EditPolicy.DIRECT_EDIT_ROLE, new LabelDirectEditPolicy()); installEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE, new EsbDiagramEditPart.NodeLabelDragPolicy()); } *//** * @generated */ /* protected String getLabelTextHelper(IFigure figure) { if (figure instanceof WrappingLabel) { return ((WrappingLabel) figure).getText(); } else { return ((Label) figure).getText(); } } 
*//** * @generated */ /* protected void setLabelTextHelper(IFigure figure, String text) { if (figure instanceof WrappingLabel) { ((WrappingLabel) figure).setText(text); } else { ((Label) figure).setText(text); } } *//** * @generated */ /* protected Image getLabelIconHelper(IFigure figure) { if (figure instanceof WrappingLabel) { return ((WrappingLabel) figure).getIcon(); } else { return ((Label) figure).getIcon(); } } *//** * @generated */ /* protected void setLabelIconHelper(IFigure figure, Image icon) { if (figure instanceof WrappingLabel) { ((WrappingLabel) figure).setIcon(icon); } else { ((Label) figure).setIcon(icon); } } *//** * @generated */ /* public void setLabel(WrappingLabel figure) { unregisterVisuals(); setFigure(figure); defaultText = getLabelTextHelper(figure); registerVisuals(); refreshVisuals(); } *//** * @generated */ /* @SuppressWarnings("rawtypes") protected List getModelChildren() { return Collections.EMPTY_LIST; } *//** * @generated */ /* public IGraphicalEditPart getChildBySemanticHint(String semanticHint) { return null; } *//** * @generated */ /* protected EObject getParserElement() { return resolveSemanticElement(); } *//** * @generated */ /* protected Image getLabelIcon() { return null; } *//** * @generated */ /* protected String getLabelText() { String text = null; EObject parserElement = getParserElement(); if (parserElement != null && getParser() != null) { text = getParser().getPrintString(new EObjectAdapter(parserElement), getParserOptions().intValue()); } if (text == null || text.length() == 0) { text = defaultText; } return text; } *//** * @generated */ /* public void setLabelText(String text) { setLabelTextHelper(getFigure(), text); Object pdEditPolicy = getEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE); if (pdEditPolicy instanceof EsbTextSelectionEditPolicy) { ((EsbTextSelectionEditPolicy) pdEditPolicy).refreshFeedback(); } Object sfEditPolicy = getEditPolicy(EditPolicy.SELECTION_FEEDBACK_ROLE); if (sfEditPolicy instanceof 
EsbTextSelectionEditPolicy) { ((EsbTextSelectionEditPolicy) sfEditPolicy).refreshFeedback(); } } *//** * @generated */ /* public String getEditText() { if (getParserElement() == null || getParser() == null) { return ""; //$NON-NLS-1$ } return getParser().getEditString(new EObjectAdapter(getParserElement()), getParserOptions().intValue()); } *//** * @generated */ /* protected boolean isEditable() { return false; } *//** * @generated */ /* public ICellEditorValidator getEditTextValidator() { return new ICellEditorValidator() { public String isValid(final Object value) { if (value instanceof String) { final EObject element = getParserElement(); final IParser parser = getParser(); try { IParserEditStatus valid = (IParserEditStatus) getEditingDomain().runExclusive(new RunnableWithResult.Impl<IParserEditStatus>() { public void run() { setResult(parser.isValidEditString(new EObjectAdapter( element), (String) value)); } }); return valid.getCode() == ParserEditStatus.EDITABLE ? null : valid.getMessage(); } catch (InterruptedException ie) { ie.printStackTrace(); } } // shouldn't get here return null; } }; } *//** * @generated */ /* public IContentAssistProcessor getCompletionProcessor() { if (getParserElement() == null || getParser() == null) { return null; } return getParser().getCompletionProcessor(new EObjectAdapter(getParserElement())); } *//** * @generated */ /* public ParserOptions getParserOptions() { return ParserOptions.NONE; } *//** * @generated */ /* public IParser getParser() { if (parser == null) { parser = EsbParserProvider.getParser(EsbElementTypes.LoadBalanceEndPoint_3386, getParserElement(), EsbVisualIDRegistry.getType(org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoadBalanceEndPointEndPointName2EditPart.VISUAL_ID)); } return parser; } *//** * @generated */ /* protected DirectEditManager getManager() { if (manager == null) { setManager(new TextDirectEditManager( this, TextDirectEditManager.getTextCellEditorClass(this), 
EsbEditPartFactory.getTextCellEditorLocator(this))); } return manager; } *//** * @generated */ /* protected void setManager(DirectEditManager manager) { this.manager = manager; } *//** * @generated */ /* protected void performDirectEdit() { getManager().show(); } *//** * @generated */ /* protected void performDirectEdit(Point eventLocation) { if (getManager().getClass() == TextDirectEditManager.class) { ((TextDirectEditManager) getManager()).show(eventLocation.getSWTPoint()); } } *//** * @generated */ /* private void performDirectEdit(char initialCharacter) { if (getManager() instanceof TextDirectEditManager) { ((TextDirectEditManager) getManager()).show(initialCharacter); } else { performDirectEdit(); } } *//** * @generated */ /* protected void performDirectEditRequest(Request request) { final Request theRequest = request; try { getEditingDomain().runExclusive(new Runnable() { public void run() { if (isActive() && isEditable()) { if (theRequest.getExtendedData() .get(RequestConstants.REQ_DIRECTEDIT_EXTENDEDDATA_INITIAL_CHAR) instanceof Character) { Character initialChar = (Character) theRequest.getExtendedData() .get(RequestConstants.REQ_DIRECTEDIT_EXTENDEDDATA_INITIAL_CHAR); performDirectEdit(initialChar.charValue()); } else if ((theRequest instanceof DirectEditRequest) && (getEditText().equals(getLabelText()))) { DirectEditRequest editRequest = (DirectEditRequest) theRequest; performDirectEdit(editRequest.getLocation()); } else { performDirectEdit(); } } } }); } catch (InterruptedException e) { e.printStackTrace(); } } *//** * @generated */ /* protected void refreshVisuals() { super.refreshVisuals(); refreshLabel(); refreshFont(); refreshFontColor(); refreshUnderline(); refreshStrikeThrough(); } *//** * @generated */ /* protected void refreshLabel() { setLabelTextHelper(getFigure(), getLabelText()); setLabelIconHelper(getFigure(), getLabelIcon()); Object pdEditPolicy = getEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE); if (pdEditPolicy instanceof 
EsbTextSelectionEditPolicy) { ((EsbTextSelectionEditPolicy) pdEditPolicy).refreshFeedback(); } Object sfEditPolicy = getEditPolicy(EditPolicy.SELECTION_FEEDBACK_ROLE); if (sfEditPolicy instanceof EsbTextSelectionEditPolicy) { ((EsbTextSelectionEditPolicy) sfEditPolicy).refreshFeedback(); } } *//** * @generated */ /* protected void refreshUnderline() { FontStyle style = (FontStyle) getFontStyleOwnerView().getStyle(NotationPackage.eINSTANCE.getFontStyle()); if (style != null && getFigure() instanceof WrappingLabel) { ((WrappingLabel) getFigure()).setTextUnderline(style.isUnderline()); } } *//** * @generated */ /* protected void refreshStrikeThrough() { FontStyle style = (FontStyle) getFontStyleOwnerView().getStyle(NotationPackage.eINSTANCE.getFontStyle()); if (style != null && getFigure() instanceof WrappingLabel) { ((WrappingLabel) getFigure()).setTextStrikeThrough(style.isStrikeThrough()); } } *//** * @generated */ /* protected void refreshFont() { FontStyle style = (FontStyle) getFontStyleOwnerView().getStyle(NotationPackage.eINSTANCE.getFontStyle()); if (style != null) { FontData fontData = new FontData(style.getFontName(), style.getFontHeight(), (style.isBold() ? SWT.BOLD : SWT.NORMAL) | (style.isItalic() ? 
SWT.ITALIC : SWT.NORMAL)); setFont(fontData); } } *//** * @generated */ /* protected void setFontColor(Color color) { getFigure().setForegroundColor(color); } *//** * @generated */ /* protected void addSemanticListeners() { if (getParser() instanceof ISemanticParser) { EObject element = resolveSemanticElement(); parserElements = ((ISemanticParser) getParser()).getSemanticElementsBeingParsed(element); for (int i = 0; i < parserElements.size(); i++) { addListenerFilter("SemanticModel" + i, this, (EObject) parserElements.get(i)); //$NON-NLS-1$ } } else { super.addSemanticListeners(); } } *//** * @generated */ /* protected void removeSemanticListeners() { if (parserElements != null) { for (int i = 0; i < parserElements.size(); i++) { removeListenerFilter("SemanticModel" + i); //$NON-NLS-1$ } } else { super.removeSemanticListeners(); } } *//** * @generated */ /* protected AccessibleEditPart getAccessibleEditPart() { if (accessibleEP == null) { accessibleEP = new AccessibleGraphicalEditPart() { public void getName(AccessibleEvent e) { e.result = getLabelTextHelper(getFigure()); } }; } return accessibleEP; } *//** * @generated */ /* private View getFontStyleOwnerView() { return getPrimaryView(); } *//** * @generated */ /* protected void addNotationalListeners() { super.addNotationalListeners(); addListenerFilter("PrimaryView", this, getPrimaryView()); //$NON-NLS-1$ } *//** * @generated */ /* protected void removeNotationalListeners() { super.removeNotationalListeners(); removeListenerFilter("PrimaryView"); //$NON-NLS-1$ } *//** * @generated */ /* protected void handleNotificationEvent(Notification event) { Object feature = event.getFeature(); if (NotationPackage.eINSTANCE.getFontStyle_FontColor().equals(feature)) { Integer c = (Integer) event.getNewValue(); setFontColor(DiagramColorRegistry.getInstance().getColor(c)); } else if (NotationPackage.eINSTANCE.getFontStyle_Underline().equals(feature)) { refreshUnderline(); } else if 
(NotationPackage.eINSTANCE.getFontStyle_StrikeThrough().equals(feature)) { refreshStrikeThrough(); } else if (NotationPackage.eINSTANCE.getFontStyle_FontHeight().equals(feature) || NotationPackage.eINSTANCE.getFontStyle_FontName().equals(feature) || NotationPackage.eINSTANCE.getFontStyle_Bold().equals(feature) || NotationPackage.eINSTANCE.getFontStyle_Italic().equals(feature)) { refreshFont(); } else { if (getParser() != null && getParser().isAffectingEvent(event, getParserOptions().intValue())) { refreshLabel(); } if (getParser() instanceof ISemanticParser) { ISemanticParser modelParser = (ISemanticParser) getParser(); if (modelParser.areSemanticElementsAffected(null, event)) { removeSemanticListeners(); if (resolveSemanticElement() != null) { addSemanticListeners(); } refreshLabel(); } } } super.handleNotificationEvent(event); } *//** * @generated */ /* protected IFigure createFigure() { // Parent should assign one using setLabel() method return null; } } */
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.adapter.java; import org.apache.calcite.DataContext; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Enumerator; import org.apache.calcite.linq4j.Linq4j; import org.apache.calcite.linq4j.QueryProvider; import org.apache.calcite.linq4j.Queryable; import org.apache.calcite.linq4j.function.Function1; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.apache.calcite.linq4j.tree.Primitive; import org.apache.calcite.linq4j.tree.Types; import org.apache.calcite.rel.RelReferentialConstraint; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.schema.Function; import org.apache.calcite.schema.ScannableTable; import org.apache.calcite.schema.Schema; import org.apache.calcite.schema.SchemaFactory; import org.apache.calcite.schema.SchemaPlus; import org.apache.calcite.schema.Schemas; import org.apache.calcite.schema.Statistic; import org.apache.calcite.schema.Statistics; import org.apache.calcite.schema.Table; import org.apache.calcite.schema.TableMacro; import org.apache.calcite.schema.TranslatableTable; import 
org.apache.calcite.schema.impl.AbstractSchema;
import org.apache.calcite.schema.impl.AbstractTableQueryable;
import org.apache.calcite.schema.impl.ReflectiveFunctionBase;
import org.apache.calcite.util.BuiltInMethod;
import org.apache.calcite.util.Util;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Multimap;

import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * Implementation of {@link org.apache.calcite.schema.Schema} that exposes the
 * public fields and methods in a Java object.
 *
 * <p>Each public field whose type is an array or {@link Iterable} becomes a
 * table named after the field; each public method returning
 * {@link TranslatableTable} becomes a table macro named after the method.
 */
public class ReflectiveSchema extends AbstractSchema {
  // Runtime class of the wrapped target; fields/methods are discovered from it.
  private final Class clazz;
  // The wrapped object whose members back the schema.
  private Object target;
  // Lazily built caches; populated on first access (not thread-safe — NOTE(review):
  // concurrent first calls could build the map twice; confirm callers serialize access).
  private Map<String, Table> tableMap;
  private Multimap<String, Function> functionMap;

  /**
   * Creates a ReflectiveSchema.
   *
   * @param target Object whose fields will be sub-objects of the schema
   */
  public ReflectiveSchema(Object target) {
    super();
    this.clazz = target.getClass();
    this.target = target;
  }

  @Override public String toString() {
    return "ReflectiveSchema(target=" + target + ")";
  }

  /** Returns the wrapped object.
   *
   * <p>May not appear to be used, but is used in generated code via
   * {@link org.apache.calcite.util.BuiltInMethod#REFLECTIVE_SCHEMA_GET_TARGET}.
   */
  public Object getTarget() {
    return target;
  }

  /** Returns the table map, building and caching it on first call. */
  @SuppressWarnings({ "rawtypes", "unchecked" })
  @Override protected Map<String, Table> getTableMap() {
    if (tableMap == null) {
      tableMap = createTableMap();
    }
    return tableMap;
  }

  /** Scans the target's public fields: collection-like fields become tables;
   * {@link RelReferentialConstraint} fields attach referential-constraint
   * statistics to the table they reference. */
  private Map<String, Table> createTableMap() {
    final ImmutableMap.Builder<String, Table> builder = ImmutableMap.builder();
    for (Field field : clazz.getFields()) {
      final String fieldName = field.getName();
      final Table table = fieldRelation(field);
      if (table == null) {
        // Field is not an array/Iterable; it does not become a table.
        continue;
      }
      builder.put(fieldName, table);
    }
    Map<String, Table> tableMap = builder.build();
    // Unique-Key - Foreign-Key
    for (Field field : clazz.getFields()) {
      if (RelReferentialConstraint.class.isAssignableFrom(field.getType())) {
        RelReferentialConstraint rc;
        try {
          rc = (RelReferentialConstraint) field.get(target);
        } catch (IllegalAccessException e) {
          throw new RuntimeException(
              "Error while accessing field " + field, e);
        }
        // The constraint's source table must have been created above;
        // append the constraint to its existing statistic.
        FieldTable table =
            (FieldTable) tableMap.get(Util.last(rc.getSourceQualifiedName()));
        assert table != null;
        table.statistic =
            Statistics.of(
                ImmutableList.copyOf(
                    Iterables.concat(
                        table.getStatistic().getReferentialConstraints(),
                        Collections.singleton(rc))));
      }
    }
    return tableMap;
  }

  /** Returns the function multimap, building and caching it on first call. */
  @Override protected Multimap<String, Function> getFunctionMultimap() {
    if (functionMap == null) {
      functionMap = createFunctionMap();
    }
    return functionMap;
  }

  /** Scans the target's public methods; each one returning
   * {@link TranslatableTable} is exposed as a table macro. */
  private Multimap<String, Function> createFunctionMap() {
    final ImmutableMultimap.Builder<String, Function> builder =
        ImmutableMultimap.builder();
    for (Method method : clazz.getMethods()) {
      final String methodName = method.getName();
      if (method.getDeclaringClass() == Object.class
          || methodName.equals("toString")) {
        // Skip Object's own methods and toString overrides.
        continue;
      }
      if (TranslatableTable.class.isAssignableFrom(method.getReturnType())) {
        final TableMacro tableMacro = new MethodTableMacro(this, method);
        builder.put(methodName, tableMacro);
      }
    }
    return builder.build();
  }

  /** Returns an expression for the object wrapped by this schema (not the
   * schema itself).
   *
   * <p>Generates a call to {@code getTarget()} on the unwrapped schema,
   * cast to the target's concrete class if necessary.
   */
  Expression getTargetExpression(SchemaPlus parentSchema, String name) {
    return Types.castIfNecessary(
        target.getClass(),
        Expressions.call(
            Schemas.unwrap(
                getExpression(parentSchema, name),
                ReflectiveSchema.class),
            BuiltInMethod.REFLECTIVE_SCHEMA_GET_TARGET.method));
  }

  /** Returns a table based on a particular field of this schema. If the
   * field is not of the right type to be a relation, returns null. */
  private <T> Table fieldRelation(final Field field) {
    final Type elementType = getElementType(field.getType());
    if (elementType == null) {
      return null;
    }
    Object o;
    try {
      o = field.get(target);
    } catch (IllegalAccessException e) {
      throw new RuntimeException(
          "Error while accessing field " + field, e);
    }
    @SuppressWarnings("unchecked")
    final Enumerable<T> enumerable = toEnumerable(o);
    return new FieldTable<>(field, elementType, enumerable);
  }

  /** Deduces the element type of a collection;
   * same logic as {@link #toEnumerable} */
  private static Type getElementType(Class clazz) {
    if (clazz.isArray()) {
      return clazz.getComponentType();
    }
    if (Iterable.class.isAssignableFrom(clazz)) {
      // Element type of an Iterable is not recoverable via erasure; use Object.
      return Object.class;
    }
    return null; // not a collection/array/iterable
  }

  /** Wraps an array (object or primitive) or Iterable as an Enumerable;
   * throws for any other kind of object. */
  private static Enumerable toEnumerable(final Object o) {
    if (o.getClass().isArray()) {
      if (o instanceof Object[]) {
        return Linq4j.asEnumerable((Object[]) o);
      } else {
        // Primitive array: box via a List view.
        return Linq4j.asEnumerable(Primitive.asList(o));
      }
    }
    if (o instanceof Iterable) {
      return Linq4j.asEnumerable((Iterable) o);
    }
    throw new RuntimeException(
        "Cannot convert " + o.getClass() + " into a Enumerable");
  }

  /** Table that is implemented by reading from a Java object.
   */
  private static class ReflectiveTable
      extends AbstractQueryableTable
      implements Table, ScannableTable {
    private final Type elementType;
    private final Enumerable enumerable;

    ReflectiveTable(Type elementType, Enumerable enumerable) {
      super(elementType);
      this.elementType = elementType;
      this.enumerable = enumerable;
    }

    /** Derives the row type from the Java element type via the type factory. */
    public RelDataType getRowType(RelDataTypeFactory typeFactory) {
      return ((JavaTypeFactory) typeFactory).createType(elementType);
    }

    public Statistic getStatistic() {
      return Statistics.UNKNOWN;
    }

    /** Scans rows; each element is flattened to an Object[] of its public
     * field values unless it already is an Object[]. */
    public Enumerable<Object[]> scan(DataContext root) {
      if (elementType == Object[].class) {
        //noinspection unchecked
        return enumerable;
      } else {
        //noinspection unchecked
        return enumerable.select(new FieldSelector((Class) elementType));
      }
    }

    public <T> Queryable<T> asQueryable(QueryProvider queryProvider,
        SchemaPlus schema, String tableName) {
      return new AbstractTableQueryable<T>(queryProvider, schema, this,
          tableName) {
        @SuppressWarnings("unchecked")
        public Enumerator<T> enumerator() {
          return (Enumerator<T>) enumerable.enumerator();
        }
      };
    }
  }

  /** Factory that creates a schema by instantiating an object and looking at
   * its public fields.
   *
   * <p>The following example instantiates a {@code FoodMart} object as a schema
   * that contains tables called {@code EMPS} and {@code DEPTS} based on the
   * object's fields.
   *
   * <blockquote><pre>
   * schemas: [
   *   {
   *     name: "foodmart",
   *     type: "custom",
   *     factory: "org.apache.calcite.adapter.java.ReflectiveSchema$Factory",
   *     operand: {
   *       class: "com.acme.FoodMart",
   *       staticMethod: "instance"
   *     }
   *   }
   * ]
   * &nbsp;
   * class FoodMart {
   *   public static final FoodMart instance() {
   *     return new FoodMart();
   *   }
   *   &nbsp;
   *   Employee[] EMPS;
   *   Department[] DEPTS;
   * }</pre></blockquote>
   */
  public static class Factory implements SchemaFactory {
    /** Creates the schema: loads the operand's {@code class}, then obtains the
     * target either from the optional {@code staticMethod} or from the class's
     * no-arg constructor. */
    public Schema create(SchemaPlus parentSchema, String name,
        Map<String, Object> operand) {
      Class<?> clazz;
      Object target;
      final Object className = operand.get("class");
      if (className != null) {
        try {
          clazz = Class.forName((String) className);
        } catch (ClassNotFoundException e) {
          throw new RuntimeException("Error loading class " + className, e);
        }
      } else {
        throw new RuntimeException("Operand 'class' is required");
      }
      final Object methodName = operand.get("staticMethod");
      if (methodName != null) {
        try {
          //noinspection unchecked
          Method method = clazz.getMethod((String) methodName);
          target = method.invoke(null);
        } catch (Exception e) {
          throw new RuntimeException("Error invoking method " + methodName, e);
        }
      } else {
        try {
          final Constructor<?> constructor = clazz.getConstructor();
          target = constructor.newInstance();
        } catch (Exception e) {
          throw new RuntimeException("Error instantiating class " + className,
              e);
        }
      }
      return new ReflectiveSchema(target);
    }
  }

  /** Table macro based on a Java method.
   */
  private static class MethodTableMacro extends ReflectiveFunctionBase
      implements TableMacro {
    private final ReflectiveSchema schema;

    MethodTableMacro(ReflectiveSchema schema, Method method) {
      super(method);
      this.schema = schema;
      assert TranslatableTable.class.isAssignableFrom(method.getReturnType())
          : "Method should return TranslatableTable so the macro can be "
          + "expanded";
    }

    public String toString() {
      return "Member {method=" + method + "}";
    }

    /** Invokes the underlying method on the schema's target with the macro
     * arguments and returns the resulting table. */
    public TranslatableTable apply(final List<Object> arguments) {
      try {
        final Object o = method.invoke(schema.getTarget(), arguments.toArray());
        return (TranslatableTable) o;
      } catch (IllegalAccessException | InvocationTargetException e) {
        throw new RuntimeException(e);
      }
    }
  }

  /** Table based on a Java field.
   *
   * @param <T> element type */
  private static class FieldTable<T> extends ReflectiveTable {
    private final Field field;
    // Mutable: createTableMap() rewrites this to attach referential constraints.
    private Statistic statistic;

    FieldTable(Field field, Type elementType, Enumerable<T> enumerable) {
      this(field, elementType, enumerable, Statistics.UNKNOWN);
    }

    FieldTable(Field field, Type elementType, Enumerable<T> enumerable,
        Statistic statistic) {
      super(elementType, enumerable);
      this.field = field;
      this.statistic = statistic;
    }

    public String toString() {
      return "Relation {field=" + field.getName() + "}";
    }

    @Override public Statistic getStatistic() {
      return statistic;
    }

    /** Generates an expression reading this field off the schema's target. */
    @Override public Expression getExpression(SchemaPlus schema,
        String tableName, Class clazz) {
      return Expressions.field(
          schema.unwrap(ReflectiveSchema.class).getTargetExpression(
              schema.getParentSchema(), schema.getName()),
          field);
    }
  }

  /** Function that returns an array of a given object's field values.
   */
  private static class FieldSelector implements Function1<Object, Object[]> {
    private final Field[] fields;

    FieldSelector(Class elementType) {
      this.fields = elementType.getFields();
    }

    public Object[] apply(Object o) {
      try {
        final Object[] objects = new Object[fields.length];
        for (int i = 0; i < fields.length; i++) {
          objects[i] = fields[i].get(o);
        }
        return objects;
      } catch (IllegalAccessException e) {
        throw new RuntimeException(e);
      }
    }
  }
}

// End ReflectiveSchema.java
/* * Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.strata.product.swap; import static com.opengamma.strata.basics.currency.Currency.EUR; import static com.opengamma.strata.basics.currency.Currency.GBP; import static com.opengamma.strata.basics.date.BusinessDayConventions.FOLLOWING; import static com.opengamma.strata.basics.date.DayCounts.ACT_360; import static com.opengamma.strata.basics.date.DayCounts.ACT_365F; import static com.opengamma.strata.basics.date.DayCounts.ONE_ONE; import static com.opengamma.strata.basics.date.HolidayCalendarIds.GBLO; import static com.opengamma.strata.basics.index.FxIndices.EUR_GBP_ECB; import static com.opengamma.strata.basics.index.IborIndices.GBP_LIBOR_1M; import static com.opengamma.strata.basics.index.IborIndices.GBP_LIBOR_3M; import static com.opengamma.strata.basics.index.PriceIndices.GB_RPI; import static com.opengamma.strata.basics.schedule.Frequency.P12M; import static com.opengamma.strata.basics.schedule.Frequency.P1M; import static com.opengamma.strata.basics.schedule.Frequency.P2M; import static com.opengamma.strata.basics.schedule.Frequency.P3M; import static com.opengamma.strata.basics.schedule.StubConvention.SMART_INITIAL; import static com.opengamma.strata.collect.TestHelper.assertSerialization; import static com.opengamma.strata.collect.TestHelper.coverBeanEquals; import static com.opengamma.strata.collect.TestHelper.coverImmutableBean; import static com.opengamma.strata.collect.TestHelper.date; import static com.opengamma.strata.product.common.PayReceive.PAY; import static com.opengamma.strata.product.common.PayReceive.RECEIVE; import static com.opengamma.strata.product.swap.CompoundingMethod.STRAIGHT; import static com.opengamma.strata.product.swap.PriceIndexCalculationMethod.INTERPOLATED; import static com.opengamma.strata.product.swap.PriceIndexCalculationMethod.MONTHLY; import static 
com.opengamma.strata.product.swap.SwapLegType.FIXED; import static com.opengamma.strata.product.swap.SwapLegType.IBOR; import static org.assertj.core.api.Assertions.assertThat; import java.time.LocalDate; import java.time.Period; import java.time.YearMonth; import org.junit.jupiter.api.Test; import com.google.common.collect.ImmutableSet; import com.opengamma.strata.basics.ReferenceData; import com.opengamma.strata.basics.currency.CurrencyAmount; import com.opengamma.strata.basics.currency.Payment; import com.opengamma.strata.basics.date.AdjustableDate; import com.opengamma.strata.basics.date.BusinessDayAdjustment; import com.opengamma.strata.basics.date.DayCounts; import com.opengamma.strata.basics.date.DaysAdjustment; import com.opengamma.strata.basics.index.FxIndexObservation; import com.opengamma.strata.basics.index.Index; import com.opengamma.strata.basics.schedule.Frequency; import com.opengamma.strata.basics.schedule.PeriodicSchedule; import com.opengamma.strata.basics.schedule.StubConvention; import com.opengamma.strata.basics.value.ValueAdjustment; import com.opengamma.strata.basics.value.ValueSchedule; import com.opengamma.strata.basics.value.ValueStep; import com.opengamma.strata.product.rate.FixedRateComputation; import com.opengamma.strata.product.rate.IborRateComputation; import com.opengamma.strata.product.rate.InflationInterpolatedRateComputation; import com.opengamma.strata.product.rate.InflationMonthlyRateComputation; /** * Test. 
*/ public class RateCalculationSwapLegTest { private static final ReferenceData REF_DATA = ReferenceData.standard(); private static final LocalDate DATE_01_02 = date(2014, 1, 2); private static final LocalDate DATE_01_05 = date(2014, 1, 5); private static final LocalDate DATE_01_06 = date(2014, 1, 6); private static final LocalDate DATE_02_03 = date(2014, 2, 3); private static final LocalDate DATE_02_05 = date(2014, 2, 5); private static final LocalDate DATE_02_07 = date(2014, 2, 7); private static final LocalDate DATE_03_03 = date(2014, 3, 3); private static final LocalDate DATE_03_05 = date(2014, 3, 5); private static final LocalDate DATE_03_07 = date(2014, 3, 7); private static final LocalDate DATE_04_03 = date(2014, 4, 3); private static final LocalDate DATE_04_05 = date(2014, 4, 5); private static final LocalDate DATE_04_07 = date(2014, 4, 7); private static final LocalDate DATE_04_09 = date(2014, 4, 9); private static final LocalDate DATE_05_01 = date(2014, 5, 1); private static final LocalDate DATE_05_05 = date(2014, 5, 5); private static final LocalDate DATE_05_06 = date(2014, 5, 6); private static final LocalDate DATE_05_08 = date(2014, 5, 8); private static final LocalDate DATE_06_05 = date(2014, 6, 5); private static final LocalDate DATE_06_09 = date(2014, 6, 9); private static final LocalDate DATE_14_06_09 = date(2014, 6, 9); private static final LocalDate DATE_19_06_09 = date(2019, 6, 9); private static final DaysAdjustment PLUS_THREE_DAYS = DaysAdjustment.ofBusinessDays(3, GBLO); private static final DaysAdjustment PLUS_TWO_DAYS = DaysAdjustment.ofBusinessDays(2, GBLO); private static final DaysAdjustment MINUS_TWO_DAYS = DaysAdjustment.ofBusinessDays(-2, GBLO); //------------------------------------------------------------------------- @Test public void test_builder() { BusinessDayAdjustment bda = BusinessDayAdjustment.of(FOLLOWING, GBLO); PeriodicSchedule accrualSchedule = PeriodicSchedule.builder() .startDate(DATE_01_05) .endDate(DATE_04_05) 
.frequency(P1M) .businessDayAdjustment(bda) .build(); PaymentSchedule paymentSchedule = PaymentSchedule.builder() .paymentFrequency(P1M) .paymentDateOffset(DaysAdjustment.ofBusinessDays(2, GBLO)) .build(); FixedRateCalculation rateCalc = FixedRateCalculation.builder() .dayCount(DayCounts.ACT_365F) .rate(ValueSchedule.of(0.025d)) .build(); NotionalSchedule notionalSchedule = NotionalSchedule.of(GBP, 1000d); RateCalculationSwapLeg test = RateCalculationSwapLeg.builder() .payReceive(PAY) .accrualSchedule(accrualSchedule) .paymentSchedule(paymentSchedule) .notionalSchedule(notionalSchedule) .calculation(rateCalc) .build(); assertThat(test.getStartDate()).isEqualTo(AdjustableDate.of(DATE_01_05, bda)); assertThat(test.getEndDate()).isEqualTo(AdjustableDate.of(DATE_04_05, bda)); assertThat(test.getCurrency()).isEqualTo(GBP); assertThat(test.getPayReceive()).isEqualTo(PAY); assertThat(test.getAccrualSchedule()).isEqualTo(accrualSchedule); assertThat(test.getPaymentSchedule()).isEqualTo(paymentSchedule); assertThat(test.getNotionalSchedule()).isEqualTo(notionalSchedule); assertThat(test.getCalculation()).isEqualTo(rateCalc); } //------------------------------------------------------------------------- @Test public void test_collectIndices_simple() { RateCalculationSwapLeg test = RateCalculationSwapLeg.builder() .payReceive(PAY) .accrualSchedule(PeriodicSchedule.builder() .startDate(DATE_01_05) .endDate(DATE_04_05) .frequency(P1M) .businessDayAdjustment(BusinessDayAdjustment.of(FOLLOWING, GBLO)) .build()) .paymentSchedule(PaymentSchedule.builder() .paymentFrequency(P1M) .paymentDateOffset(PLUS_TWO_DAYS) .build()) .notionalSchedule(NotionalSchedule.of(GBP, 1000d)) .calculation(IborRateCalculation.builder() .dayCount(DayCounts.ACT_365F) .index(GBP_LIBOR_3M) .fixingDateOffset(MINUS_TWO_DAYS) .build()) .build(); ImmutableSet.Builder<Index> builder = ImmutableSet.builder(); test.collectIndices(builder); assertThat(builder.build()).containsOnly(GBP_LIBOR_3M); 
assertThat(test.allIndices()).containsOnly(GBP_LIBOR_3M); assertThat(test.allCurrencies()).containsOnly(GBP); } @Test public void test_collectIndices_fxReset() { RateCalculationSwapLeg test = RateCalculationSwapLeg.builder() .payReceive(PAY) .accrualSchedule(PeriodicSchedule.builder() .startDate(DATE_01_05) .endDate(DATE_04_05) .frequency(P1M) .businessDayAdjustment(BusinessDayAdjustment.of(FOLLOWING, GBLO)) .build()) .paymentSchedule(PaymentSchedule.builder() .paymentFrequency(P1M) .paymentDateOffset(PLUS_TWO_DAYS) .build()) .notionalSchedule(NotionalSchedule.builder() .currency(GBP) .amount(ValueSchedule.of(1000d)) .finalExchange(true) .fxReset(FxResetCalculation.builder() .referenceCurrency(EUR) .index(EUR_GBP_ECB) .fixingDateOffset(MINUS_TWO_DAYS) .build()) .build()) .calculation(IborRateCalculation.builder() .dayCount(DayCounts.ACT_365F) .index(GBP_LIBOR_3M) .fixingDateOffset(MINUS_TWO_DAYS) .build()) .build(); ImmutableSet.Builder<Index> builder = ImmutableSet.builder(); test.collectIndices(builder); assertThat(builder.build()).containsOnly(GBP_LIBOR_3M, EUR_GBP_ECB); assertThat(test.allIndices()).containsOnly(GBP_LIBOR_3M, EUR_GBP_ECB); assertThat(test.allCurrencies()).containsOnly(GBP, EUR); } //------------------------------------------------------------------------- @Test public void test_replaceStartDate() { // test case RateCalculationSwapLeg test = RateCalculationSwapLeg.builder() .payReceive(PAY) .accrualSchedule(PeriodicSchedule.builder() .startDate(DATE_01_05) .endDate(DATE_04_05) .frequency(P1M) .businessDayAdjustment(BusinessDayAdjustment.of(FOLLOWING, GBLO)) .build()) .paymentSchedule(PaymentSchedule.builder() .paymentFrequency(P1M) .paymentDateOffset(PLUS_TWO_DAYS) .build()) .notionalSchedule(NotionalSchedule.of(GBP, 1000d)) .calculation(FixedRateCalculation.builder() .dayCount(ACT_365F) .rate(ValueSchedule.of(0.025d)) .build()) .build(); // expected RateCalculationSwapLeg expected = test.toBuilder() .accrualSchedule(PeriodicSchedule.builder() 
.startDate(DATE_01_02) .startDateBusinessDayAdjustment(BusinessDayAdjustment.NONE) .endDate(DATE_04_05) .frequency(P1M) .stubConvention(SMART_INITIAL) .businessDayAdjustment(BusinessDayAdjustment.of(FOLLOWING, GBLO)) .build()) .build(); // assertion assertThat(test.replaceStartDate(DATE_01_02)).isEqualTo(expected); } //------------------------------------------------------------------------- @Test public void test_resolve_oneAccrualPerPayment_fixedRate() { // test case RateCalculationSwapLeg test = RateCalculationSwapLeg.builder() .payReceive(PAY) .accrualSchedule(PeriodicSchedule.builder() .startDate(DATE_01_05) .endDate(DATE_04_05) .frequency(P1M) .businessDayAdjustment(BusinessDayAdjustment.of(FOLLOWING, GBLO)) .build()) .paymentSchedule(PaymentSchedule.builder() .paymentFrequency(P1M) .paymentDateOffset(PLUS_TWO_DAYS) .build()) .notionalSchedule(NotionalSchedule.of(GBP, 1000d)) .calculation(FixedRateCalculation.builder() .dayCount(ACT_365F) .rate(ValueSchedule.of(0.025d)) .build()) .build(); // expected RatePaymentPeriod rpp1 = RatePaymentPeriod.builder() .paymentDate(DATE_02_07) .accrualPeriods(RateAccrualPeriod.builder() .startDate(DATE_01_06) .endDate(DATE_02_05) .unadjustedStartDate(DATE_01_05) .yearFraction(ACT_365F.yearFraction(DATE_01_06, DATE_02_05)) .rateComputation(FixedRateComputation.of(0.025d)) .build()) .dayCount(ACT_365F) .currency(GBP) .notional(-1000d) .build(); RatePaymentPeriod rpp2 = RatePaymentPeriod.builder() .paymentDate(DATE_03_07) .accrualPeriods(RateAccrualPeriod.builder() .startDate(DATE_02_05) .endDate(DATE_03_05) .yearFraction(ACT_365F.yearFraction(DATE_02_05, DATE_03_05)) .rateComputation(FixedRateComputation.of(0.025d)) .build()) .dayCount(ACT_365F) .currency(GBP) .notional(-1000d) .build(); RatePaymentPeriod rpp3 = RatePaymentPeriod.builder() .paymentDate(DATE_04_09) .accrualPeriods(RateAccrualPeriod.builder() .startDate(DATE_03_05) .endDate(DATE_04_07) .unadjustedEndDate(DATE_04_05) 
.yearFraction(ACT_365F.yearFraction(DATE_03_05, DATE_04_07)) .rateComputation(FixedRateComputation.of(0.025d)) .build()) .dayCount(ACT_365F) .currency(GBP) .notional(-1000d) .build(); // assertion assertThat(test.resolve(REF_DATA)).isEqualTo(ResolvedSwapLeg.builder() .type(FIXED) .payReceive(PAY) .paymentPeriods(rpp1, rpp2, rpp3) .build()); } @Test public void test_resolve_knownAmountStub() { // test case CurrencyAmount knownAmount = CurrencyAmount.of(GBP, 150d); RateCalculationSwapLeg test = RateCalculationSwapLeg.builder() .payReceive(PAY) .accrualSchedule(PeriodicSchedule.builder() .startDate(DATE_02_03) .endDate(DATE_04_03) .firstRegularStartDate(DATE_02_05) .lastRegularEndDate(DATE_03_05) .frequency(P1M) .stubConvention(StubConvention.BOTH) .businessDayAdjustment(BusinessDayAdjustment.of(FOLLOWING, GBLO)) .build()) .paymentSchedule(PaymentSchedule.builder() .paymentFrequency(P1M) .paymentDateOffset(PLUS_TWO_DAYS) .build()) .notionalSchedule(NotionalSchedule.of(GBP, 1000d)) .calculation(FixedRateCalculation.builder() .dayCount(ACT_365F) .rate(ValueSchedule.of(0.025d)) .initialStub(FixedRateStubCalculation.ofKnownAmount(knownAmount)) .finalStub(FixedRateStubCalculation.ofFixedRate(0.1d)) .build()) .build(); // expected KnownAmountNotionalSwapPaymentPeriod pp1 = KnownAmountNotionalSwapPaymentPeriod.builder() .payment(Payment.of(knownAmount, DATE_02_07)) .startDate(DATE_02_03) .endDate(DATE_02_05) .unadjustedStartDate(DATE_02_03) .notionalAmount(CurrencyAmount.of(GBP, -1000d)) .build(); RatePaymentPeriod rpp2 = RatePaymentPeriod.builder() .paymentDate(DATE_03_07) .accrualPeriods(RateAccrualPeriod.builder() .startDate(DATE_02_05) .endDate(DATE_03_05) .yearFraction(ACT_365F.yearFraction(DATE_02_05, DATE_03_05)) .rateComputation(FixedRateComputation.of(0.025d)) .build()) .dayCount(ACT_365F) .currency(GBP) .notional(-1000d) .build(); RatePaymentPeriod rpp3 = RatePaymentPeriod.builder() .paymentDate(DATE_04_07) .accrualPeriods(RateAccrualPeriod.builder() 
.startDate(DATE_03_05) .endDate(DATE_04_03) .unadjustedEndDate(DATE_04_03) .yearFraction(ACT_365F.yearFraction(DATE_03_05, DATE_04_03)) .rateComputation(FixedRateComputation.of(0.1d)) .build()) .dayCount(ACT_365F) .currency(GBP) .notional(-1000d) .build(); // assertion assertThat(test.resolve(REF_DATA)).isEqualTo(ResolvedSwapLeg.builder() .type(FIXED) .payReceive(PAY) .paymentPeriods(pp1, rpp2, rpp3) .build()); } @Test public void test_resolve_twoAccrualsPerPayment_iborRate_varyingNotional_notionalExchange() { // test case RateCalculationSwapLeg test = RateCalculationSwapLeg.builder() .payReceive(PAY) .accrualSchedule(PeriodicSchedule.builder() .startDate(DATE_01_05) .endDate(DATE_06_05) .frequency(P1M) .businessDayAdjustment(BusinessDayAdjustment.of(FOLLOWING, GBLO)) .build()) .paymentSchedule(PaymentSchedule.builder() .paymentFrequency(P2M) .paymentDateOffset(PLUS_TWO_DAYS) .compoundingMethod(STRAIGHT) .build()) .notionalSchedule(NotionalSchedule.builder() .currency(GBP) .amount(ValueSchedule.of(1000d, ValueStep.of(1, ValueAdjustment.ofReplace(1500d)))) .initialExchange(true) .intermediateExchange(true) .finalExchange(true) .build()) .calculation(IborRateCalculation.builder() .dayCount(ACT_365F) .index(GBP_LIBOR_1M) .fixingDateOffset(DaysAdjustment.ofBusinessDays(-2, GBLO)) .build()) .build(); // expected RatePaymentPeriod rpp1 = RatePaymentPeriod.builder() .paymentDate(DATE_03_07) .accrualPeriods( RateAccrualPeriod.builder() .startDate(DATE_01_06) .endDate(DATE_02_05) .unadjustedStartDate(DATE_01_05) .yearFraction(ACT_365F.yearFraction(DATE_01_06, DATE_02_05)) .rateComputation(IborRateComputation.of(GBP_LIBOR_1M, DATE_01_02, REF_DATA)) .build(), RateAccrualPeriod.builder() .startDate(DATE_02_05) .endDate(DATE_03_05) .yearFraction(ACT_365F.yearFraction(DATE_02_05, DATE_03_05)) .rateComputation(IborRateComputation.of(GBP_LIBOR_1M, DATE_02_03, REF_DATA)) .build()) .dayCount(ACT_365F) .currency(GBP) .notional(-1000d) .compoundingMethod(STRAIGHT) .build(); 
RatePaymentPeriod rpp2 = RatePaymentPeriod.builder() .paymentDate(DATE_05_08) .accrualPeriods( RateAccrualPeriod.builder() .startDate(DATE_03_05) .endDate(DATE_04_07) .unadjustedEndDate(DATE_04_05) .yearFraction(ACT_365F.yearFraction(DATE_03_05, DATE_04_07)) .rateComputation(IborRateComputation.of(GBP_LIBOR_1M, DATE_03_03, REF_DATA)) .build(), RateAccrualPeriod.builder() .startDate(DATE_04_07) .endDate(DATE_05_06) .unadjustedStartDate(DATE_04_05) .unadjustedEndDate(DATE_05_05) .yearFraction(ACT_365F.yearFraction(DATE_04_07, DATE_05_06)) .rateComputation(IborRateComputation.of(GBP_LIBOR_1M, DATE_04_03, REF_DATA)) .build()) .dayCount(ACT_365F) .currency(GBP) .notional(-1500d) .compoundingMethod(STRAIGHT) .build(); RatePaymentPeriod rpp3 = RatePaymentPeriod.builder() .paymentDate(DATE_06_09) .accrualPeriods(RateAccrualPeriod.builder() .startDate(DATE_05_06) .endDate(DATE_06_05) .unadjustedStartDate(DATE_05_05) .yearFraction(ACT_365F.yearFraction(DATE_05_06, DATE_06_05)) .rateComputation(IborRateComputation.of(GBP_LIBOR_1M, DATE_05_01, REF_DATA)) .build()) .dayCount(ACT_365F) .currency(GBP) .notional(-1500d) .compoundingMethod(STRAIGHT) .build(); // events (only one intermediate exchange) NotionalExchange nexInitial = NotionalExchange.of(CurrencyAmount.of(GBP, 1000d), DATE_01_06); NotionalExchange nexIntermediate = NotionalExchange.of(CurrencyAmount.of(GBP, 500d), DATE_03_07); NotionalExchange nexFinal = NotionalExchange.of(CurrencyAmount.of(GBP, -1500d), DATE_06_09); // assertion assertThat(test.resolve(REF_DATA)).isEqualTo(ResolvedSwapLeg.builder() .type(IBOR) .payReceive(PAY) .paymentPeriods(rpp1, rpp2, rpp3) .paymentEvents(nexInitial, nexIntermediate, nexFinal) .build()); } @Test public void test_resolve_threeAccrualsPerPayment() { // test case RateCalculationSwapLeg test = RateCalculationSwapLeg.builder() .payReceive(PAY) .accrualSchedule(PeriodicSchedule.builder() .startDate(DATE_01_05) .endDate(DATE_04_05) .frequency(P1M) 
.businessDayAdjustment(BusinessDayAdjustment.of(FOLLOWING, GBLO)) .build()) .paymentSchedule(PaymentSchedule.builder() .paymentFrequency(P3M) .paymentDateOffset(PLUS_TWO_DAYS) .compoundingMethod(STRAIGHT) .build()) .notionalSchedule(NotionalSchedule.of(GBP, 1000d)) .calculation(FixedRateCalculation.builder() .dayCount(ACT_365F) .rate(ValueSchedule.of(0.025d)) .build()) .build(); // expected RatePaymentPeriod rpp1 = RatePaymentPeriod.builder() .paymentDate(DATE_04_09) .accrualPeriods( RateAccrualPeriod.builder() .startDate(DATE_01_06) .endDate(DATE_02_05) .unadjustedStartDate(DATE_01_05) .yearFraction(ACT_365F.yearFraction(DATE_01_06, DATE_02_05)) .rateComputation(FixedRateComputation.of(0.025d)) .build(), RateAccrualPeriod.builder() .startDate(DATE_02_05) .endDate(DATE_03_05) .yearFraction(ACT_365F.yearFraction(DATE_02_05, DATE_03_05)) .rateComputation(FixedRateComputation.of(0.025d)) .build(), RateAccrualPeriod.builder() .startDate(DATE_03_05) .endDate(DATE_04_07) .unadjustedEndDate(DATE_04_05) .yearFraction(ACT_365F.yearFraction(DATE_03_05, DATE_04_07)) .rateComputation(FixedRateComputation.of(0.025d)) .build()) .dayCount(ACT_365F) .currency(GBP) .notional(-1000d) .compoundingMethod(STRAIGHT) .build(); // assertion assertThat(test.resolve(REF_DATA)).isEqualTo(ResolvedSwapLeg.builder() .type(FIXED) .payReceive(PAY) .paymentPeriods(rpp1) .build()); } //------------------------------------------------------------------------- @Test public void test_resolve_oneAccrualPerPayment_fxReset() { // test case RateCalculationSwapLeg test = RateCalculationSwapLeg.builder() .payReceive(PAY) .accrualSchedule(PeriodicSchedule.builder() .startDate(DATE_01_05) .endDate(DATE_04_05) .frequency(P1M) .businessDayAdjustment(BusinessDayAdjustment.of(FOLLOWING, GBLO)) .build()) .paymentSchedule(PaymentSchedule.builder() .paymentFrequency(P1M) .paymentDateOffset(PLUS_TWO_DAYS) .build()) .notionalSchedule(NotionalSchedule.builder() .currency(GBP) .amount(ValueSchedule.of(1000d)) 
.fxReset(FxResetCalculation.builder() .referenceCurrency(EUR) .index(EUR_GBP_ECB) .fixingDateOffset(MINUS_TWO_DAYS) .build()) .initialExchange(true) .intermediateExchange(true) .finalExchange(true) .build()) .calculation(FixedRateCalculation.builder() .dayCount(ACT_365F) .rate(ValueSchedule.of(0.025d)) .build()) .build(); // expected RatePaymentPeriod rpp1 = RatePaymentPeriod.builder() .paymentDate(DATE_02_07) .accrualPeriods(RateAccrualPeriod.builder() .startDate(DATE_01_06) .endDate(DATE_02_05) .unadjustedStartDate(DATE_01_05) .yearFraction(ACT_365F.yearFraction(DATE_01_06, DATE_02_05)) .rateComputation(FixedRateComputation.of(0.025d)) .build()) .dayCount(ACT_365F) .currency(GBP) .notional(-1000d) .fxReset(FxReset.of(FxIndexObservation.of(EUR_GBP_ECB, DATE_01_02, REF_DATA), EUR)) .build(); RatePaymentPeriod rpp2 = RatePaymentPeriod.builder() .paymentDate(DATE_03_07) .accrualPeriods(RateAccrualPeriod.builder() .startDate(DATE_02_05) .endDate(DATE_03_05) .yearFraction(ACT_365F.yearFraction(DATE_02_05, DATE_03_05)) .rateComputation(FixedRateComputation.of(0.025d)) .build()) .dayCount(ACT_365F) .currency(GBP) .notional(-1000d) .fxReset(FxReset.of(FxIndexObservation.of(EUR_GBP_ECB, DATE_02_03, REF_DATA), EUR)) .build(); RatePaymentPeriod rpp3 = RatePaymentPeriod.builder() .paymentDate(DATE_04_09) .accrualPeriods(RateAccrualPeriod.builder() .startDate(DATE_03_05) .endDate(DATE_04_07) .unadjustedEndDate(DATE_04_05) .yearFraction(ACT_365F.yearFraction(DATE_03_05, DATE_04_07)) .rateComputation(FixedRateComputation.of(0.025d)) .build()) .dayCount(ACT_365F) .currency(GBP) .notional(-1000d) .fxReset(FxReset.of(FxIndexObservation.of(EUR_GBP_ECB, DATE_03_03, REF_DATA), EUR)) .build(); FxResetNotionalExchange ne1a = FxResetNotionalExchange.of( CurrencyAmount.of(EUR, 1000d), DATE_01_06, FxIndexObservation.of(EUR_GBP_ECB, DATE_01_02, REF_DATA)); FxResetNotionalExchange ne1b = FxResetNotionalExchange.of( CurrencyAmount.of(EUR, -1000d), DATE_02_07, 
FxIndexObservation.of(EUR_GBP_ECB, DATE_01_02, REF_DATA)); FxResetNotionalExchange ne2a = FxResetNotionalExchange.of( CurrencyAmount.of(EUR, 1000d), DATE_02_07, FxIndexObservation.of(EUR_GBP_ECB, DATE_02_03, REF_DATA)); FxResetNotionalExchange ne2b = FxResetNotionalExchange.of( CurrencyAmount.of(EUR, -1000d), DATE_03_07, FxIndexObservation.of(EUR_GBP_ECB, DATE_02_03, REF_DATA)); FxResetNotionalExchange ne3a = FxResetNotionalExchange.of( CurrencyAmount.of(EUR, 1000d), DATE_03_07, FxIndexObservation.of(EUR_GBP_ECB, DATE_03_03, REF_DATA)); FxResetNotionalExchange ne3b = FxResetNotionalExchange.of( CurrencyAmount.of(EUR, -1000d), DATE_04_09, FxIndexObservation.of(EUR_GBP_ECB, DATE_03_03, REF_DATA)); // assertion assertThat(test.resolve(REF_DATA)).isEqualTo(ResolvedSwapLeg.builder() .type(FIXED) .payReceive(PAY) .paymentPeriods(rpp1, rpp2, rpp3) .paymentEvents(ne1a, ne1b, ne2a, ne2b, ne3a, ne3b) .build()); } //------------------------------------------------------------------------- @Test public void test_inflation_monthly() { BusinessDayAdjustment bda = BusinessDayAdjustment.of(FOLLOWING, GBLO); PeriodicSchedule accrualSchedule = PeriodicSchedule.builder() .startDate(DATE_14_06_09) .endDate(DATE_19_06_09) .frequency(Frequency.ofYears(5)) .businessDayAdjustment(bda) .build(); PaymentSchedule paymentSchedule = PaymentSchedule.builder() .paymentFrequency(Frequency.ofYears(5)) .paymentDateOffset(DaysAdjustment.ofBusinessDays(2, GBLO)) .build(); InflationRateCalculation rateCalc = InflationRateCalculation.builder() .index(GB_RPI) .indexCalculationMethod(MONTHLY) .lag(Period.ofMonths(3)) .build(); NotionalSchedule notionalSchedule = NotionalSchedule.of(GBP, 1000d); RateCalculationSwapLeg test = RateCalculationSwapLeg.builder() .payReceive(PAY) .accrualSchedule(accrualSchedule) .paymentSchedule(paymentSchedule) .notionalSchedule(notionalSchedule) .calculation(rateCalc) .build(); assertThat(test.getStartDate()).isEqualTo(AdjustableDate.of(DATE_14_06_09, bda)); 
assertThat(test.getEndDate()).isEqualTo(AdjustableDate.of(DATE_19_06_09, bda)); assertThat(test.getCurrency()).isEqualTo(GBP); assertThat(test.getPayReceive()).isEqualTo(PAY); assertThat(test.getAccrualSchedule()).isEqualTo(accrualSchedule); assertThat(test.getPaymentSchedule()).isEqualTo(paymentSchedule); assertThat(test.getNotionalSchedule()).isEqualTo(notionalSchedule); assertThat(test.getCalculation()).isEqualTo(rateCalc); RatePaymentPeriod rpp = RatePaymentPeriod.builder() .paymentDate(DaysAdjustment.ofBusinessDays(2, GBLO).adjust(bda.adjust(DATE_19_06_09, REF_DATA), REF_DATA)) .accrualPeriods(RateAccrualPeriod.builder() .startDate(BusinessDayAdjustment.of(FOLLOWING, GBLO).adjust(DATE_14_06_09, REF_DATA)) .endDate(BusinessDayAdjustment.of(FOLLOWING, GBLO).adjust(DATE_19_06_09, REF_DATA)) .unadjustedStartDate(DATE_14_06_09) .unadjustedEndDate(DATE_19_06_09) .yearFraction(1.0) .rateComputation( InflationMonthlyRateComputation.of( GB_RPI, YearMonth.from(bda.adjust(DATE_14_06_09, REF_DATA)).minusMonths(3), YearMonth.from(bda.adjust(DATE_19_06_09, REF_DATA)).minusMonths(3))) .build()) .dayCount(ONE_ONE) .currency(GBP) .notional(-1000d) .build(); ResolvedSwapLeg expected = ResolvedSwapLeg.builder() .paymentPeriods(rpp) .payReceive(PAY) .type(SwapLegType.INFLATION) .build(); ResolvedSwapLeg testResolved = test.resolve(REF_DATA); assertThat(testResolved).isEqualTo(expected); } @Test public void test_inflation_interpolated() { BusinessDayAdjustment bda = BusinessDayAdjustment.of(FOLLOWING, GBLO); PeriodicSchedule accrualSchedule = PeriodicSchedule.builder() .startDate(DATE_14_06_09) .endDate(DATE_19_06_09) .frequency(Frequency.ofYears(5)) .businessDayAdjustment(bda) .build(); PaymentSchedule paymentSchedule = PaymentSchedule.builder() .paymentFrequency(Frequency.ofYears(5)) .paymentDateOffset(DaysAdjustment.ofBusinessDays(2, GBLO)) .build(); InflationRateCalculation rateCalc = InflationRateCalculation.builder() .index(GB_RPI) .indexCalculationMethod(INTERPOLATED) 
.lag(Period.ofMonths(3)) .build(); NotionalSchedule notionalSchedule = NotionalSchedule.of(GBP, 1000d); RateCalculationSwapLeg test = RateCalculationSwapLeg.builder() .payReceive(RECEIVE) .accrualSchedule(accrualSchedule) .paymentSchedule(paymentSchedule) .notionalSchedule(notionalSchedule) .calculation(rateCalc) .build(); assertThat(test.getStartDate()).isEqualTo(AdjustableDate.of(DATE_14_06_09, bda)); assertThat(test.getEndDate()).isEqualTo(AdjustableDate.of(DATE_19_06_09, bda)); assertThat(test.getCurrency()).isEqualTo(GBP); assertThat(test.getPayReceive()).isEqualTo(RECEIVE); assertThat(test.getAccrualSchedule()).isEqualTo(accrualSchedule); assertThat(test.getPaymentSchedule()).isEqualTo(paymentSchedule); assertThat(test.getNotionalSchedule()).isEqualTo(notionalSchedule); assertThat(test.getCalculation()).isEqualTo(rateCalc); double weight = 1. - 9.0 / 30.0; RatePaymentPeriod rpp0 = RatePaymentPeriod.builder() .paymentDate(DaysAdjustment.ofBusinessDays(2, GBLO).adjust(bda.adjust(DATE_19_06_09, REF_DATA), REF_DATA)) .accrualPeriods(RateAccrualPeriod.builder() .startDate(bda.adjust(DATE_14_06_09, REF_DATA)) .endDate(bda.adjust(DATE_19_06_09, REF_DATA)) .unadjustedStartDate(DATE_14_06_09) .unadjustedEndDate(DATE_19_06_09) .yearFraction(1.0) .rateComputation( InflationInterpolatedRateComputation.of( GB_RPI, YearMonth.from(bda.adjust(DATE_14_06_09, REF_DATA)).minusMonths(3), YearMonth.from(bda.adjust(DATE_19_06_09, REF_DATA)).minusMonths(3), weight)) .build()) .dayCount(ONE_ONE) .currency(GBP) .notional(1000d) .build(); ResolvedSwapLeg expected = ResolvedSwapLeg.builder() .paymentPeriods(rpp0) .payReceive(RECEIVE) .type(SwapLegType.INFLATION) .build(); ResolvedSwapLeg testExpand = test.resolve(REF_DATA); assertThat(testExpand).isEqualTo(expected); } @Test public void test_inflation_fixed() { BusinessDayAdjustment bda = BusinessDayAdjustment.of(FOLLOWING, GBLO); PeriodicSchedule accrualSchedule = PeriodicSchedule.builder() .startDate(DATE_14_06_09) 
.endDate(DATE_19_06_09) .frequency(P12M) .businessDayAdjustment(bda) .build(); PaymentSchedule paymentSchedule = PaymentSchedule.builder() .paymentFrequency(Frequency.ofYears(5)) .paymentDateOffset(DaysAdjustment.ofBusinessDays(2, GBLO)) .compoundingMethod(STRAIGHT) .build(); FixedRateCalculation rateCalc = FixedRateCalculation.builder() .rate(ValueSchedule.of(0.05)) .dayCount(ONE_ONE) // year fraction is always 1. .build(); NotionalSchedule notionalSchedule = NotionalSchedule.of(GBP, 1000d); RateCalculationSwapLeg test = RateCalculationSwapLeg.builder() .payReceive(RECEIVE) .accrualSchedule(accrualSchedule) .paymentSchedule(paymentSchedule) .notionalSchedule(notionalSchedule) .calculation(rateCalc) .build(); assertThat(test.getStartDate()).isEqualTo(AdjustableDate.of(DATE_14_06_09, bda)); assertThat(test.getEndDate()).isEqualTo(AdjustableDate.of(DATE_19_06_09, bda)); assertThat(test.getCurrency()).isEqualTo(GBP); assertThat(test.getPayReceive()).isEqualTo(RECEIVE); assertThat(test.getAccrualSchedule()).isEqualTo(accrualSchedule); assertThat(test.getPaymentSchedule()).isEqualTo(paymentSchedule); assertThat(test.getNotionalSchedule()).isEqualTo(notionalSchedule); assertThat(test.getCalculation()).isEqualTo(rateCalc); RateAccrualPeriod rap0 = RateAccrualPeriod.builder() .startDate(bda.adjust(DATE_14_06_09, REF_DATA)) .endDate(bda.adjust(DATE_14_06_09.plusYears(1), REF_DATA)) .unadjustedStartDate(DATE_14_06_09) .unadjustedEndDate(DATE_14_06_09.plusYears(1)) .yearFraction(1.0) .rateComputation(FixedRateComputation.of(0.05)) .build(); RateAccrualPeriod rap1 = RateAccrualPeriod.builder() .startDate(bda.adjust(DATE_14_06_09.plusYears(1), REF_DATA)) .endDate(bda.adjust(DATE_14_06_09.plusYears(2), REF_DATA)) .unadjustedStartDate(DATE_14_06_09.plusYears(1)) .unadjustedEndDate(DATE_14_06_09.plusYears(2)) .yearFraction(1.0) .rateComputation(FixedRateComputation.of(0.05)) .build(); RateAccrualPeriod rap2 = RateAccrualPeriod.builder() 
.startDate(bda.adjust(DATE_14_06_09.plusYears(2), REF_DATA)) .endDate(bda.adjust(DATE_14_06_09.plusYears(3), REF_DATA)) .unadjustedStartDate(DATE_14_06_09.plusYears(2)) .unadjustedEndDate(DATE_14_06_09.plusYears(3)) .yearFraction(1.0) .rateComputation(FixedRateComputation.of(0.05)) .build(); RateAccrualPeriod rap3 = RateAccrualPeriod.builder() .startDate(bda.adjust(DATE_14_06_09.plusYears(3), REF_DATA)) .endDate(bda.adjust(DATE_14_06_09.plusYears(4), REF_DATA)) .unadjustedStartDate(DATE_14_06_09.plusYears(3)) .unadjustedEndDate(DATE_14_06_09.plusYears(4)) .yearFraction(1.0) .rateComputation(FixedRateComputation.of(0.05)) .build(); RateAccrualPeriod rap4 = RateAccrualPeriod.builder() .startDate(bda.adjust(DATE_14_06_09.plusYears(4), REF_DATA)) .endDate(bda.adjust(DATE_19_06_09, REF_DATA)) .unadjustedStartDate(DATE_14_06_09.plusYears(4)) .unadjustedEndDate(DATE_19_06_09) .yearFraction(1.0) .rateComputation(FixedRateComputation.of(0.05)) .build(); RatePaymentPeriod rpp = RatePaymentPeriod.builder() .paymentDate(DaysAdjustment.ofBusinessDays(2, GBLO).adjust(bda.adjust(DATE_19_06_09, REF_DATA), REF_DATA)) .accrualPeriods(rap0, rap1, rap2, rap3, rap4) .compoundingMethod(STRAIGHT) .dayCount(ONE_ONE) .currency(GBP) .notional(1000d) .build(); ResolvedSwapLeg expected = ResolvedSwapLeg.builder() .paymentPeriods(rpp) .payReceive(RECEIVE) .type(SwapLegType.FIXED) .build(); ResolvedSwapLeg testExpand = test.resolve(REF_DATA); assertThat(testExpand).isEqualTo(expected); } //------------------------------------------------------------------------- @Test public void coverage() { RateCalculationSwapLeg test = RateCalculationSwapLeg.builder() .payReceive(PAY) .accrualSchedule(PeriodicSchedule.builder() .startDate(DATE_01_05) .endDate(DATE_04_05) .frequency(P1M) .businessDayAdjustment(BusinessDayAdjustment.of(FOLLOWING, GBLO)) .build()) .paymentSchedule(PaymentSchedule.builder() .paymentFrequency(P1M) .paymentDateOffset(PLUS_TWO_DAYS) .build()) 
.notionalSchedule(NotionalSchedule.of(GBP, 1000d)) .calculation(FixedRateCalculation.builder() .dayCount(ACT_365F) .rate(ValueSchedule.of(0.025d)) .build()) .build(); coverImmutableBean(test); RateCalculationSwapLeg test2 = RateCalculationSwapLeg.builder() .payReceive(RECEIVE) .accrualSchedule(PeriodicSchedule.builder() .startDate(DATE_02_05) .endDate(DATE_03_05) .frequency(P1M) .businessDayAdjustment(BusinessDayAdjustment.of(FOLLOWING, GBLO)) .build()) .paymentSchedule(PaymentSchedule.builder() .paymentFrequency(P1M) .paymentDateOffset(PLUS_THREE_DAYS) .build()) .notionalSchedule(NotionalSchedule.of(GBP, 2000d)) .calculation(FixedRateCalculation.builder() .dayCount(ACT_360) .rate(ValueSchedule.of(0.025d)) .build()) .build(); coverBeanEquals(test, test2); } @Test public void test_serialization() { RateCalculationSwapLeg test = RateCalculationSwapLeg.builder() .payReceive(PAY) .accrualSchedule(PeriodicSchedule.builder() .startDate(DATE_01_05) .endDate(DATE_04_05) .frequency(P1M) .businessDayAdjustment(BusinessDayAdjustment.of(FOLLOWING, GBLO)) .build()) .paymentSchedule(PaymentSchedule.builder() .paymentFrequency(P1M) .paymentDateOffset(PLUS_TWO_DAYS) .build()) .notionalSchedule(NotionalSchedule.of(GBP, 1000d)) .calculation(FixedRateCalculation.builder() .dayCount(DayCounts.ACT_365F) .rate(ValueSchedule.of(0.025d)) .build()) .build(); assertSerialization(test); } }
package org.openfs.fileservice.fileseq;

import java.io.File;

import org.apache.camel.EndpointInject;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.apache.camel.util.FileUtil;
import org.junit.Before;
import org.junit.Test;

/**
 * Camel route tests for {@link FileKeySequencer}: sequence generation, formatting,
 * min/max wrap-around and per-key isolation, all backed by a file store under
 * {@code target/test} that is wiped before each test.
 *
 * <p>Fix notes: removed the redundant import of {@code FileKeySequencer} (it lives in
 * this package) and added the missing {@code @Override} on {@link #setUp()}, which
 * overrides {@code CamelTestSupport.setUp()}.
 */
public class FileSequencerTest extends CamelTestSupport {

    @EndpointInject(uri = "mock:result")
    MockEndpoint resultEndpoint;

    @EndpointInject(uri = "mock:result1")
    MockEndpoint resultEndpoint1;

    @Before
    @Override
    public void setUp() throws Exception {
        super.setUp();
        // remove any sequence files left over from a previous run so counters start fresh
        cleanupDir("target/test");
    }

    /** Incrementing by the {@code incrValue} header accumulates: 0+1=1, 1+3=4, 4+4=8. */
    @Test
    public void testIncrSeqHeader() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:in")
                    .transform().method(new FileKeySequencer("target/test"), "getSequence(testkey)")
                    .log("${body}")
                    .to("mock:result");
            }
        });
        context.start();
        resultEndpoint.expectedBodiesReceived("1", "4", "8");
        template.sendBodyAndHeader("direct:in", "0", "incrValue", 1L);
        template.sendBodyAndHeader("direct:in", "0", "incrValue", 3L);
        template.sendBodyAndHeader("direct:in", "0", "incrValue", 4L);
        resultEndpoint.assertIsSatisfied();
    }

    /** A two-value range (min 9998, max 9999) wraps back to the minimum. */
    @Test
    public void testFormatvalueFileSeqArgs() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                FileKeySequencer seq = new FileKeySequencer();
                seq.setFileStore("target/test");
                seq.setMinValue(9998L);
                seq.setMaxValue(9999L);
                from("direct:in")
                    .transform().method(seq, "formatSequence(testkey)")
                    .log("${body}")
                    .to("mock:result");
            }
        });
        context.start();
        resultEndpoint.expectedBodiesReceived("9998", "9999", "9998", "9999");
        template.sendBody("direct:in", "0");
        template.sendBody("direct:in", "0");
        template.sendBody("direct:in", "0");
        template.sendBody("direct:in", "0");
        resultEndpoint.assertIsSatisfied();
    }

    /** Default format yields zero-padded four-digit values when the key is a method argument. */
    @Test
    public void testFormatFileSeqArgs() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:in")
                    .transform().method(new FileKeySequencer("target/test"), "formatSequence(testkey)")
                    .log("${body}")
                    .to("mock:result");
            }
        });
        context.start();
        resultEndpoint.expectedBodiesReceived("0000", "0001", "0002");
        template.sendBody("direct:in", "0");
        template.sendBody("direct:in", "0");
        template.sendBody("direct:in", "0");
        resultEndpoint.assertIsSatisfied();
    }

    /** A custom {@code %08d} format widens the padding; key taken from the {@code keyName} header. */
    @Test
    public void testFormatFileSeqFormatted() throws Exception {
        FileKeySequencer myBean = new FileKeySequencer("target/test");
        myBean.setFormat("%08d");
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:in")
                    .transform().method(myBean, "formatSequence")
                    .log("${body}")
                    .to("mock:result");
            }
        });
        context.start();
        resultEndpoint.expectedBodiesReceived("00000000", "00000001", "00000002");
        template.sendBodyAndHeader("direct:in", "0", "keyName", "testKey");
        template.sendBodyAndHeader("direct:in", "0", "keyName", "testKey");
        template.sendBodyAndHeader("direct:in", "0", "keyName", "testKey");
        resultEndpoint.assertIsSatisfied();
    }

    /** After reaching {@code maxValue} the sequence wraps to the default minimum (0). */
    @Test
    public void testFormatFileSeqMax() throws Exception {
        FileKeySequencer myBean = new FileKeySequencer("target/test");
        myBean.setFormat("%d");
        myBean.setMaxValue(2L);
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:in")
                    .transform().method(myBean, "formatSequence")
                    .log("${body}")
                    .to("mock:result");
            }
        });
        context.start();
        resultEndpoint.expectedBodiesReceived("0", "1", "2", "0");
        template.sendBodyAndHeader("direct:in", "0", "keyName", "testKey");
        template.sendBodyAndHeader("direct:in", "0", "keyName", "testKey");
        template.sendBodyAndHeader("direct:in", "0", "keyName", "testKey");
        template.sendBodyAndHeader("direct:in", "0", "keyName", "testKey");
        resultEndpoint.assertIsSatisfied();
    }

    /** A non-zero {@code minValue} starts the sequence at that value. */
    @Test
    public void testFormatFileSeqMin() throws Exception {
        FileKeySequencer myBean = new FileKeySequencer("target/test");
        myBean.setFormat("%d");
        myBean.setMinValue(2L);
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:in")
                    .transform().method(myBean, "formatSequence")
                    .log("${body}")
                    .to("mock:result");
            }
        });
        context.start();
        resultEndpoint.expectedBodiesReceived("2", "3", "4", "5");
        template.sendBodyAndHeader("direct:in", "0", "keyName", "testKey");
        template.sendBodyAndHeader("direct:in", "0", "keyName", "testKey");
        template.sendBodyAndHeader("direct:in", "0", "keyName", "testKey");
        template.sendBodyAndHeader("direct:in", "0", "keyName", "testKey");
        resultEndpoint.assertIsSatisfied();
    }

    /** Default format with the key supplied via the {@code keyName} header. */
    @Test
    public void testFormatFileSeqHeader() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:in")
                    .transform().method(new FileKeySequencer("target/test"), "formatSequence")
                    .log("${body}")
                    .to("mock:result");
            }
        });
        context.start();
        resultEndpoint.expectedBodiesReceived("0000", "0001", "0002");
        template.sendBodyAndHeader("direct:in", "0", "keyName", "testKey");
        template.sendBodyAndHeader("direct:in", "0", "keyName", "testKey");
        template.sendBodyAndHeader("direct:in", "0", "keyName", "testKey");
        resultEndpoint.assertIsSatisfied();
    }

    /** Two routes with distinct keys each maintain an independent counter in the same store. */
    @Test
    public void testFormatFileSeqHeader2() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:in")
                    .transform().method(new FileKeySequencer("target/test"), "formatSequence")
                    .log("${body}")
                    .to("mock:result");
                from("seda:in1")
                    .transform().method(new FileKeySequencer("target/test"), "formatSequence")
                    .log("${body}")
                    .to("mock:result1");
            }
        });
        context.start();
        resultEndpoint.expectedBodiesReceived("0000", "0001");
        resultEndpoint1.expectedBodiesReceived("0000", "0001");
        template.sendBodyAndHeader("direct:in", "0", "keyName", "testKey");
        template.sendBodyAndHeader("direct:in", "0", "keyName", "testKey");
        template.sendBodyAndHeader("seda:in1", "0", "keyName", "testKey1");
        template.sendBodyAndHeader("seda:in1", "0", "keyName", "testKey1");
        resultEndpoint.assertIsSatisfied();
        resultEndpoint1.assertIsSatisfied();
    }

    /** Deletes the directory (and its contents) if it exists; no-op otherwise. */
    protected void cleanupDir(String dirname) {
        File d = new File(dirname);
        if (d.exists() && d.isDirectory())
            FileUtil.removeDir(d);
    }
}
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.java.refactoring;

import com.intellij.JavaTestUtil;
import com.intellij.codeInsight.TargetElementUtil;
import com.intellij.codeInsight.daemon.ImplicitUsageProvider;
import com.intellij.ide.scratch.ScratchFileService;
import com.intellij.ide.scratch.ScratchRootType;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.roots.LanguageLevelProjectExtension;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiNamedElement;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.refactoring.BaseRefactoringProcessor;
import com.intellij.refactoring.MultiFileTestCase;
import com.intellij.refactoring.RefactoringSettings;
import com.intellij.refactoring.safeDelete.SafeDeleteHandler;
import com.intellij.testFramework.IdeaTestUtil;
import com.intellij.testFramework.PsiTestUtil;
import com.intellij.util.PathUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.jps.model.java.JavaSourceRootType;
import org.jetbrains.jps.model.java.JpsJavaExtensionService;

/**
 * Functional tests for the Safe Delete refactoring.
 *
 * <p>Two test styles are used:
 * <ul>
 *   <li>{@link #doTest(String)} — multi-file tests: loads a project tree from the test-data
 *       root, locates the named class, and safe-deletes the element at the caret (or the
 *       class itself when no caret is set);</li>
 *   <li>{@link #doSingleFileTest()} — single-file tests: loads {@code <TestName>.java},
 *       performs the deletion, and compares against {@code <TestName>_after.java}.</li>
 * </ul>
 *
 * <p>Conflict scenarios invoke the refactoring, expect
 * {@link BaseRefactoringProcessor.ConflictsInTestsException}, and assert the exact
 * conflict message text.
 */
public class SafeDeleteTest extends MultiFileTestCase {
  @NotNull
  @Override
  protected String getTestDataPath() {
    return JavaTestUtil.getJavaTestDataPath();
  }

  @NotNull
  @Override
  protected String getTestRoot() {
    return "/refactoring/safeDelete/";
  }

  // --- conflict detection: implicit constructor calls ---

  public void testImplicitCtrCall() {
    try {
      doTest("Super");
      fail();
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertTrue(message, message.startsWith("constructor <b><code>Super.Super()</code></b> has 1 usage that is not safe to delete"));
    }
  }

  public void testImplicitCtrCall2() {
    try {
      doTest("Super");
      fail();
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertTrue(message, message.startsWith("constructor <b><code>Super.Super()</code></b> has 1 usage that is not safe to delete"));
    }
  }

  public void testMultipleInterfacesImplementation() {
    doTest("IFoo");
  }

  public void testMultipleInterfacesImplementationThroughCommonInterface() {
    doTest("IFoo");
  }

  public void testUsageInExtendsList() throws Exception {
    doSingleFileTest();
  }

  // --- cascading ("deep") deletion of parameters, fields, and methods ---

  public void testDeepDeleteParameterSimple() throws Exception {
    doSingleFileTest();
  }

  public void testDeepDeleteParameterOtherTypeInBinaryExpression() throws Exception {
    doSingleFileTest();
  }

  public void testDeepDeleteFieldAndAssignedParameter() throws Exception {
    doSingleFileTest();
  }

  public void testImpossibleToDeepDeleteParameter() throws Exception {
    doSingleFileTest();
  }

  public void testNoDeepDeleteParameterUsedInCallQualifier() throws Exception {
    doSingleFileTest();
  }

  public void testNoDeepDeleteParameterUsedInNextArgumentExpression() throws Exception {
    doSingleFileTest();
  }

  public void testToDeepDeleteParameterOverriders() throws Exception {
    doSingleFileTest();
  }

  public void testDeleteParameterOfASiblingMethod() throws Exception {
    doSingleFileTest();
  }

  public void testDeleteMethodCascade() throws Exception {
    doSingleFileTest();
  }

  public void testDeleteMethodKeepEnumValues() throws Exception {
    doSingleFileTest();
  }

  public void testDeleteMethodCascadeRecursive() throws Exception {
    doSingleFileTest();
  }

  public void testDeleteMethodCascadeOverridden() throws Exception {
    doSingleFileTest();
  }

  public void testDeleteParameterAndUpdateJavadocRef() throws Exception {
    doSingleFileTest();
  }

  public void testDeleteConstructorParameterWithAnonymousClassUsage() throws Exception {
    doSingleFileTest();
  }

  // --- property-style accessor usages ---

  public void testDeleteMethodWithPropertyUsage() {
    doTest("Foo");
  }

  public void testDeleteClassWithPropertyUsage() {
    doTest("Foo");
  }

  public void testDeleteMethodWithoutPropertyUsage() {
    // Register a provider that marks elements named "a.b.c" as implicitly used,
    // scoped to this test via getTestRootDisposable().
    ImplicitUsageProvider.EP_NAME.getPoint().registerExtension(new ImplicitUsageProvider() {
      @Override
      public boolean isImplicitUsage(@NotNull PsiElement element) {
        return element instanceof PsiNamedElement && ((PsiNamedElement)element).getName().equals("a.b.c");
      }

      @Override
      public boolean isImplicitRead(@NotNull PsiElement element) {
        return false;
      }

      @Override
      public boolean isImplicitWrite(@NotNull PsiElement element) {
        return false;
      }
    }, getTestRootDisposable());
    doTest("Foo");
  }

  // --- hierarchy, imports, javadoc, and annotation handling ---

  public void testParameterInHierarchy() {
    doTest("C2");
  }

  public void testTopLevelDocComment() {
    doTest("foo.C1");
  }

  public void testOverloadedMethods() {
    doTest("foo.A");
  }

  public void testTopParameterInHierarchy() {
    doTest("I");
  }

  public void testExtendsList() {
    doTest("B");
  }

  public void testJavadocParamRef() {
    doTest("Super");
  }

  public void testEnumConstructorParameter() {
    doTest("UserFlags");
  }

  public void testSafeDeleteStaticImports() {
    doTest("A");
  }

  public void testSafeDeleteImports() {
    doTest("B");
  }

  public void testSafeDeleteImportsOnInnerClasses() {
    doTest("p.B");
  }

  public void testRemoveOverridersInspiteOfUnsafeUsages() {
    // Conflicts are expected here; proceed with deletion anyway.
    BaseRefactoringProcessor.ConflictsInTestsException.withIgnoredConflicts(()->doTest("A"));
  }

  public void testLocalVariable() {
    doTest("Super");
  }

  public void testOverrideAnnotation() {
    doTest("Super");
  }

  public void testSuperCall() {
    try {
      doTest("Super");
      fail("Conflict was not detected");
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertEquals("method <b><code>Super.foo()</code></b> has 1 usage that is not safe to delete.", message);
    }
  }

  // --- functional interfaces / lambdas (require language level 8) ---

  public void testParameterFromFunctionalInterface() throws Exception {
    try {
      LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_8);
      doSingleFileTest();
      fail("Conflict was not detected");
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertEquals("interface <b><code>SAM</code></b> has 1 usage that is not safe to delete.", message);
    }
  }

  public void testFunctionalInterfaceMethod() throws Exception {
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_8);
    doSingleFileTest();
  }

  public void testAmbiguityAfterParameterDelete() throws Exception {
    try {
      doSingleFileTest();
      fail("Conflict was not detected");
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertEquals("Method foo() is already defined in the class <b><code>Test</code></b>", message);
    }
  }

  public void testFunctionalInterfaceDefaultMethod() throws Exception {
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_8);
    doSingleFileTest();
  }

  public void testMethodDeepHierarchy() {
    doTest("Super");
  }

  public void testInterfaceAsTypeParameterBound() throws Exception {
    doSingleFileTest();
  }

  public void testNestedTypeParameterBounds() throws Exception {
    doSingleFileTest();
  }

  public void testTypeParameterWithoutOwner() throws Exception {
    doSingleFileTest();
  }

  // --- side effects block silent deletion ---

  public void testLocalVariableSideEffect() {
    try {
      doTest("Super");
      fail("Side effect was ignored");
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertEquals("local variable <b><code>varName</code></b> has 1 usage that is not safe to delete.", message);
    }
  }

  public void testParameterSideEffect() {
    try {
      doTest("Super");
      fail("Side effect was ignored");
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertEquals("parameter <b><code>i</code></b> has 1 usage that is not safe to delete.", message);
    }
  }

  public void testUsageInGenerated() {
    doTest("A");
  }

  // --- try-with-resources / diamond (require language level 7) ---

  public void testLastResourceVariable() throws Exception {
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_7);
    doSingleFileTest();
  }

  public void testLastResourceVariableConflictingVar() throws Exception {
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_7);
    doSingleFileTest();
  }

  public void testLastResourceVariableWithFinallyBlock() throws Exception {
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_7);
    doSingleFileTest();
  }

  public void testLastTypeParam() throws Exception {
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_7);
    doSingleFileTest();
  }

  public void testTypeParamFromDiamond() throws Exception {
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_7);
    doSingleFileTest();
  }

  public void testStripOverride() throws Exception {
    doSingleFileTest();
  }

  public void testEmptyIf() throws Exception {
    doSingleFileTest();
  }

  public void testTypeParameterWithinMethodHierarchy() throws Exception {
    doSingleFileTest();
  }

  public void testTypeParameterNoMethodHierarchy() throws Exception {
    doSingleFileTest();
  }

  public void testClassWithInnerStaticImport() {
    doTest("ClassWithInnerStaticImport");
  }

  public void testInnerClassUsedInTheSameFile() throws Exception {
    try {
      doSingleFileTest();
      fail("Side effect was ignored");
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertEquals("class <b><code>Test.Foo</code></b> has 1 usage that is not safe to delete.", message);
    }
  }

  public void testConflictInInheritor() throws Exception {
    try {
      doSingleFileTest();
      fail("Side effect was ignored");
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertEquals("Method foo() is already defined in the class <b><code>B</code></b>", message);
    }
  }

  public void testParameterInMethodUsedInMethodReference() throws Exception {
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_8);
    BaseRefactoringProcessor.ConflictsInTestsException.withIgnoredConflicts(()->doSingleFileTest());
  }

  public void testNoConflictOnDeleteParameterWithMethodRefArg() throws Exception {
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_8);
    doSingleFileTest();
  }

  public void testShowConflictsButRemoveAnnotationsIfAnnotationTypeIsDeleted() throws Exception {
    BaseRefactoringProcessor.ConflictsInTestsException.withIgnoredConflicts(()->doSingleFileTest());
  }

  public void testUsagesInScratch() throws Exception {
    BaseRefactoringProcessor.runWithDisabledPreview(() -> {
      // Create a scratch file whose comment mentions the deleted member, then verify
      // safe delete finds the usage when comment search is enabled.
      VirtualFile scratchFile = ScratchRootType.getInstance()
        .createScratchFile(getProject(), PathUtil.makeFileName("jScratch", "java"), JavaLanguage.INSTANCE,
                           "class jScratch {{//name()\n}}", ScratchFileService.Option.create_if_missing);
      RefactoringSettings settings = RefactoringSettings.getInstance();
      boolean oldCommentsOption = settings.SAFE_DELETE_SEARCH_IN_COMMENTS;
      try {
        settings.SAFE_DELETE_SEARCH_IN_COMMENTS = true;
        doSingleFileTest();
      }
      finally {
        // Restore the global setting and remove the scratch file so other tests are unaffected.
        settings.SAFE_DELETE_SEARCH_IN_COMMENTS = oldCommentsOption;
        WriteAction.run(() -> scratchFile.delete(this));
      }
    });
  }

  public void testDeepDeleteFieldAndInitializerMethod() throws Exception {
    doSingleFileTest();
  }

  public void testDeleteMethodCascadeWithField() throws Exception {
    doSingleFileTest();
  }

  public void testForInitExpr() throws Exception {
    doSingleFileTest();
  }

  public void testForInitList() throws Exception {
    doSingleFileTest();
  }

  public void testForUpdateExpr() throws Exception {
    doSingleFileTest();
  }

  public void testForUpdateList() throws Exception {
    doSingleFileTest();
  }

  public void testUpdateContractOnParameterRemoval() throws Exception {
    doSingleFileTest();
  }

  // --- sealed types and records (require language level 16 / 16-preview) ---

  public void testSealedParent() throws Exception {
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_16_PREVIEW);
    doSingleFileTest();
  }

  public void testSealedGrandParent() {
    IdeaTestUtil.withLevel(getModule(), LanguageLevel.JDK_16_PREVIEW, () -> doTest("Parent"));
  }

  public void testRecordImplementsInterface() throws Exception {
    IdeaTestUtil.setModuleLanguageLevel(getModule(), LanguageLevel.JDK_16, getTestRootDisposable());
    doSingleFileTest();
  }

  public void testNonAccessibleGrandParent() {
    try {
      doTest("foo.Parent");
      fail("Conflict was not detected");
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertEquals("class <b><code>foo.Parent</code></b> has 1 usage that is not safe to delete.", message);
    }
  }

  public void testLastClassInPackage() {
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_9);
    doTest("pack1.First");
  }

  public void testNotLastClassInPackage() {
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_9);
    doTest("pack1.First");
  }

  /** Runs a multi-file test: deletes {@code qClassName} (or the element at the caret inside it). */
  private void doTest(@NonNls final String qClassName) {
    doTest((rootDir, rootAfter) -> this.performAction(qClassName));
  }

  /**
   * Registers content/source roots for the loaded test project.
   * A {@code src} child becomes the source root (falling back to the default layout when
   * absent); a {@code gen} child, if present, is registered as a generated-sources root.
   */
  @Override
  protected void prepareProject(VirtualFile rootDir) {
    VirtualFile src = rootDir.findChild("src");
    if (src == null) {
      super.prepareProject(rootDir);
    }
    else {
      PsiTestUtil.addContentRoot(myModule, rootDir);
      PsiTestUtil.addSourceRoot(myModule, src);
    }
    VirtualFile gen = rootDir.findChild("gen");
    if (gen != null) {
      PsiTestUtil.addSourceRoot(myModule, gen, JavaSourceRootType.SOURCE,
                                JpsJavaExtensionService.getInstance().createSourceRootProperties("", true));
    }
  }

  /** Loads {@code <TestName>.java}, deletes the element at the caret, compares with {@code <TestName>_after.java}. */
  private void doSingleFileTest() throws Exception {
    configureByFile(getTestRoot() + getTestName(false) + ".java");
    performAction();
    checkResultByFile(getTestRoot() + getTestName(false) + "_after.java");
  }

  /** Opens the file containing {@code qClassName}; if no caret is set, positions it on the class itself. */
  private void performAction(final String qClassName) {
    final PsiClass aClass = myJavaFacade.findClass(qClassName, GlobalSearchScope.allScope(getProject()));
    assertNotNull("Class " + qClassName + " not found", aClass);
    configureByExistingFile(aClass.getContainingFile().getVirtualFile());
    if (myEditor.getCaretModel().getOffset() == 0) {
      myEditor.getCaretModel().moveToOffset(aClass.getTextOffset());
    }
    performAction();
  }

  /** Invokes Safe Delete on the element at the current caret position. */
  private void performAction() {
    final PsiElement psiElement = TargetElementUtil
      .findTargetElement(myEditor, TargetElementUtil.ELEMENT_NAME_ACCEPTED | TargetElementUtil.REFERENCED_ELEMENT_ACCEPTED);
    assertNotNull("No element found in text:\n" + getFile().getText(), psiElement);
    SafeDeleteHandler.invoke(getProject(), new PsiElement[]{psiElement}, true);
  }
}
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2019.01.11 at 02:39:34 PM EST // package schemas.docbook; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElementRef; import javax.xml.bind.annotation.XmlElementRefs; import javax.xml.bind.annotation.XmlID; import javax.xml.bind.annotation.XmlIDREF; import javax.xml.bind.annotation.XmlMixed; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;choice maxOccurs="unbounded" minOccurs="0"> * &lt;element ref="{http://docbook.org/ns/docbook}inlinemediaobject"/> * &lt;element ref="{http://docbook.org/ns/docbook}remark"/> * &lt;element ref="{http://docbook.org/ns/docbook}superscript"/> * &lt;element ref="{http://docbook.org/ns/docbook}subscript"/> * &lt;element ref="{http://docbook.org/ns/docbook}xref"/> * &lt;element ref="{http://docbook.org/ns/docbook}link"/> * &lt;element ref="{http://docbook.org/ns/docbook}olink"/> * &lt;element ref="{http://docbook.org/ns/docbook}anchor"/> * &lt;element ref="{http://docbook.org/ns/docbook}biblioref"/> * &lt;element ref="{http://docbook.org/ns/docbook}alt"/> * &lt;element ref="{http://docbook.org/ns/docbook}annotation"/> * &lt;element ref="{http://docbook.org/ns/docbook}indexterm"/> * &lt;element ref="{http://docbook.org/ns/docbook}phrase"/> * &lt;element ref="{http://docbook.org/ns/docbook}replaceable"/> * &lt;/choice> * &lt;attGroup ref="{http://docbook.org/ns/docbook}db.common.attributes"/> * &lt;attGroup ref="{http://docbook.org/ns/docbook}db.common.linking.attributes"/> * &lt;attribute name="role" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "content" }) @XmlRootElement(name = "fax") public class Fax { @XmlElementRefs({ @XmlElementRef(name = "link", namespace = "http://docbook.org/ns/docbook", type = Link.class, required = false), @XmlElementRef(name = "phrase", namespace = "http://docbook.org/ns/docbook", type = Phrase.class, required = false), @XmlElementRef(name = "anchor", namespace = "http://docbook.org/ns/docbook", type = Anchor.class, required = false), @XmlElementRef(name = "alt", namespace = "http://docbook.org/ns/docbook", type = Alt.class, required = false), 
@XmlElementRef(name = "indexterm", namespace = "http://docbook.org/ns/docbook", type = Indexterm.class, required = false), @XmlElementRef(name = "remark", namespace = "http://docbook.org/ns/docbook", type = Remark.class, required = false), @XmlElementRef(name = "xref", namespace = "http://docbook.org/ns/docbook", type = Xref.class, required = false), @XmlElementRef(name = "inlinemediaobject", namespace = "http://docbook.org/ns/docbook", type = Inlinemediaobject.class, required = false), @XmlElementRef(name = "olink", namespace = "http://docbook.org/ns/docbook", type = Olink.class, required = false), @XmlElementRef(name = "biblioref", namespace = "http://docbook.org/ns/docbook", type = Biblioref.class, required = false), @XmlElementRef(name = "annotation", namespace = "http://docbook.org/ns/docbook", type = Annotation.class, required = false), @XmlElementRef(name = "replaceable", namespace = "http://docbook.org/ns/docbook", type = Replaceable.class, required = false), @XmlElementRef(name = "superscript", namespace = "http://docbook.org/ns/docbook", type = Superscript.class, required = false), @XmlElementRef(name = "subscript", namespace = "http://docbook.org/ns/docbook", type = Subscript.class, required = false) }) @XmlMixed protected List<Object> content; @XmlAttribute(name = "role") @XmlSchemaType(name = "anySimpleType") protected String role; @XmlAttribute(name = "id", namespace = "http://www.w3.org/XML/1998/namespace") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) @XmlID @XmlSchemaType(name = "ID") protected String id; @XmlAttribute(name = "version") @XmlSchemaType(name = "anySimpleType") protected String commonVersion; @XmlAttribute(name = "lang", namespace = "http://www.w3.org/XML/1998/namespace") @XmlSchemaType(name = "anySimpleType") protected String xmlLang; @XmlAttribute(name = "base", namespace = "http://www.w3.org/XML/1998/namespace") @XmlSchemaType(name = "anySimpleType") protected String base; @XmlAttribute(name = "remap") @XmlSchemaType(name = 
"anySimpleType") protected String remap; @XmlAttribute(name = "xreflabel") @XmlSchemaType(name = "anySimpleType") protected String xreflabel; @XmlAttribute(name = "revisionflag") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String revisionflag; @XmlAttribute(name = "dir") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String dir; @XmlAttribute(name = "arch") @XmlSchemaType(name = "anySimpleType") protected String arch; @XmlAttribute(name = "audience") @XmlSchemaType(name = "anySimpleType") protected String audience; @XmlAttribute(name = "condition") @XmlSchemaType(name = "anySimpleType") protected String condition; @XmlAttribute(name = "conformance") @XmlSchemaType(name = "anySimpleType") protected String conformance; @XmlAttribute(name = "os") @XmlSchemaType(name = "anySimpleType") protected String os; @XmlAttribute(name = "revision") @XmlSchemaType(name = "anySimpleType") protected String commonRevision; @XmlAttribute(name = "security") @XmlSchemaType(name = "anySimpleType") protected String security; @XmlAttribute(name = "userlevel") @XmlSchemaType(name = "anySimpleType") protected String userlevel; @XmlAttribute(name = "vendor") @XmlSchemaType(name = "anySimpleType") protected String vendor; @XmlAttribute(name = "wordsize") @XmlSchemaType(name = "anySimpleType") protected String wordsize; @XmlAttribute(name = "annotations") @XmlSchemaType(name = "anySimpleType") protected String annotations; @XmlAttribute(name = "linkend") @XmlIDREF @XmlSchemaType(name = "IDREF") protected Object linkend; @XmlAttribute(name = "href", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String href; @XmlAttribute(name = "type", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String xlinkType; @XmlAttribute(name = "role", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String xlinkRole; @XmlAttribute(name = "arcrole", 
namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String arcrole; @XmlAttribute(name = "title", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String xlinkTitle; @XmlAttribute(name = "show", namespace = "http://www.w3.org/1999/xlink") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String show; @XmlAttribute(name = "actuate", namespace = "http://www.w3.org/1999/xlink") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String actuate; /** * Gets the value of the content property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the content property. * * <p> * For example, to add a new item, do as follows: * <pre> * getContent().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link Link } * {@link Phrase } * {@link Anchor } * {@link Alt } * {@link Indexterm } * {@link Remark } * {@link Xref } * {@link Inlinemediaobject } * {@link Olink } * {@link Biblioref } * {@link Annotation } * {@link Replaceable } * {@link String } * {@link Superscript } * {@link Subscript } * * */ public List<Object> getContent() { if (content == null) { content = new ArrayList<Object>(); } return this.content; } /** * Gets the value of the role property. * * @return * possible object is * {@link String } * */ public String getRole() { return role; } /** * Sets the value of the role property. * * @param value * allowed object is * {@link String } * */ public void setRole(String value) { this.role = value; } /** * Gets the value of the id property. * * @return * possible object is * {@link String } * */ public String getId() { return id; } /** * Sets the value of the id property. 
* * @param value * allowed object is * {@link String } * */ public void setId(String value) { this.id = value; } /** * Gets the value of the commonVersion property. * * @return * possible object is * {@link String } * */ public String getCommonVersion() { return commonVersion; } /** * Sets the value of the commonVersion property. * * @param value * allowed object is * {@link String } * */ public void setCommonVersion(String value) { this.commonVersion = value; } /** * Gets the value of the xmlLang property. * * @return * possible object is * {@link String } * */ public String getXmlLang() { return xmlLang; } /** * Sets the value of the xmlLang property. * * @param value * allowed object is * {@link String } * */ public void setXmlLang(String value) { this.xmlLang = value; } /** * Gets the value of the base property. * * @return * possible object is * {@link String } * */ public String getBase() { return base; } /** * Sets the value of the base property. * * @param value * allowed object is * {@link String } * */ public void setBase(String value) { this.base = value; } /** * Gets the value of the remap property. * * @return * possible object is * {@link String } * */ public String getRemap() { return remap; } /** * Sets the value of the remap property. * * @param value * allowed object is * {@link String } * */ public void setRemap(String value) { this.remap = value; } /** * Gets the value of the xreflabel property. * * @return * possible object is * {@link String } * */ public String getXreflabel() { return xreflabel; } /** * Sets the value of the xreflabel property. * * @param value * allowed object is * {@link String } * */ public void setXreflabel(String value) { this.xreflabel = value; } /** * Gets the value of the revisionflag property. * * @return * possible object is * {@link String } * */ public String getRevisionflag() { return revisionflag; } /** * Sets the value of the revisionflag property. 
* * @param value * allowed object is * {@link String } * */ public void setRevisionflag(String value) { this.revisionflag = value; } /** * Gets the value of the dir property. * * @return * possible object is * {@link String } * */ public String getDir() { return dir; } /** * Sets the value of the dir property. * * @param value * allowed object is * {@link String } * */ public void setDir(String value) { this.dir = value; } /** * Gets the value of the arch property. * * @return * possible object is * {@link String } * */ public String getArch() { return arch; } /** * Sets the value of the arch property. * * @param value * allowed object is * {@link String } * */ public void setArch(String value) { this.arch = value; } /** * Gets the value of the audience property. * * @return * possible object is * {@link String } * */ public String getAudience() { return audience; } /** * Sets the value of the audience property. * * @param value * allowed object is * {@link String } * */ public void setAudience(String value) { this.audience = value; } /** * Gets the value of the condition property. * * @return * possible object is * {@link String } * */ public String getCondition() { return condition; } /** * Sets the value of the condition property. * * @param value * allowed object is * {@link String } * */ public void setCondition(String value) { this.condition = value; } /** * Gets the value of the conformance property. * * @return * possible object is * {@link String } * */ public String getConformance() { return conformance; } /** * Sets the value of the conformance property. * * @param value * allowed object is * {@link String } * */ public void setConformance(String value) { this.conformance = value; } /** * Gets the value of the os property. * * @return * possible object is * {@link String } * */ public String getOs() { return os; } /** * Sets the value of the os property. 
* * @param value * allowed object is * {@link String } * */ public void setOs(String value) { this.os = value; } /** * Gets the value of the commonRevision property. * * @return * possible object is * {@link String } * */ public String getCommonRevision() { return commonRevision; } /** * Sets the value of the commonRevision property. * * @param value * allowed object is * {@link String } * */ public void setCommonRevision(String value) { this.commonRevision = value; } /** * Gets the value of the security property. * * @return * possible object is * {@link String } * */ public String getSecurity() { return security; } /** * Sets the value of the security property. * * @param value * allowed object is * {@link String } * */ public void setSecurity(String value) { this.security = value; } /** * Gets the value of the userlevel property. * * @return * possible object is * {@link String } * */ public String getUserlevel() { return userlevel; } /** * Sets the value of the userlevel property. * * @param value * allowed object is * {@link String } * */ public void setUserlevel(String value) { this.userlevel = value; } /** * Gets the value of the vendor property. * * @return * possible object is * {@link String } * */ public String getVendor() { return vendor; } /** * Sets the value of the vendor property. * * @param value * allowed object is * {@link String } * */ public void setVendor(String value) { this.vendor = value; } /** * Gets the value of the wordsize property. * * @return * possible object is * {@link String } * */ public String getWordsize() { return wordsize; } /** * Sets the value of the wordsize property. * * @param value * allowed object is * {@link String } * */ public void setWordsize(String value) { this.wordsize = value; } /** * Gets the value of the annotations property. * * @return * possible object is * {@link String } * */ public String getAnnotations() { return annotations; } /** * Sets the value of the annotations property. 
* * @param value * allowed object is * {@link String } * */ public void setAnnotations(String value) { this.annotations = value; } /** * Gets the value of the linkend property. * * @return * possible object is * {@link Object } * */ public Object getLinkend() { return linkend; } /** * Sets the value of the linkend property. * * @param value * allowed object is * {@link Object } * */ public void setLinkend(Object value) { this.linkend = value; } /** * Gets the value of the href property. * * @return * possible object is * {@link String } * */ public String getHref() { return href; } /** * Sets the value of the href property. * * @param value * allowed object is * {@link String } * */ public void setHref(String value) { this.href = value; } /** * Gets the value of the xlinkType property. * * @return * possible object is * {@link String } * */ public String getXlinkType() { return xlinkType; } /** * Sets the value of the xlinkType property. * * @param value * allowed object is * {@link String } * */ public void setXlinkType(String value) { this.xlinkType = value; } /** * Gets the value of the xlinkRole property. * * @return * possible object is * {@link String } * */ public String getXlinkRole() { return xlinkRole; } /** * Sets the value of the xlinkRole property. * * @param value * allowed object is * {@link String } * */ public void setXlinkRole(String value) { this.xlinkRole = value; } /** * Gets the value of the arcrole property. * * @return * possible object is * {@link String } * */ public String getArcrole() { return arcrole; } /** * Sets the value of the arcrole property. * * @param value * allowed object is * {@link String } * */ public void setArcrole(String value) { this.arcrole = value; } /** * Gets the value of the xlinkTitle property. * * @return * possible object is * {@link String } * */ public String getXlinkTitle() { return xlinkTitle; } /** * Sets the value of the xlinkTitle property. 
* * @param value * allowed object is * {@link String } * */ public void setXlinkTitle(String value) { this.xlinkTitle = value; } /** * Gets the value of the show property. * * @return * possible object is * {@link String } * */ public String getShow() { return show; } /** * Sets the value of the show property. * * @param value * allowed object is * {@link String } * */ public void setShow(String value) { this.show = value; } /** * Gets the value of the actuate property. * * @return * possible object is * {@link String } * */ public String getActuate() { return actuate; } /** * Sets the value of the actuate property. * * @param value * allowed object is * {@link String } * */ public void setActuate(String value) { this.actuate = value; } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */

import java.io.Serializable;

/**
 * Describes the security group that is referenced in a security group rule.
 * <p>
 * Plain mutable data holder: every property has a getter, a setter, and a fluent
 * {@code withXxx} variant that returns {@code this} so calls can be chained.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/ReferencedSecurityGroup" target="_top">AWS API
 *      Documentation</a>
 */
public class ReferencedSecurityGroup implements Serializable, Cloneable {

    /** The ID of the security group. */
    private String groupId;

    /** The status of a VPC peering connection, if applicable. */
    private String peeringStatus;

    /** The Amazon Web Services account ID. */
    private String userId;

    /** The ID of the VPC. */
    private String vpcId;

    /** The ID of the VPC peering connection. */
    private String vpcPeeringConnectionId;

    /** @param groupId the ID of the security group */
    public void setGroupId(String groupId) {
        this.groupId = groupId;
    }

    /** @return the ID of the security group */
    public String getGroupId() {
        return groupId;
    }

    /**
     * Fluent setter for the security group ID.
     *
     * @param groupId the ID of the security group
     * @return this object, for call chaining
     */
    public ReferencedSecurityGroup withGroupId(String groupId) {
        this.groupId = groupId;
        return this;
    }

    /** @param peeringStatus the status of a VPC peering connection, if applicable */
    public void setPeeringStatus(String peeringStatus) {
        this.peeringStatus = peeringStatus;
    }

    /** @return the status of a VPC peering connection, if applicable */
    public String getPeeringStatus() {
        return peeringStatus;
    }

    /**
     * Fluent setter for the VPC peering connection status.
     *
     * @param peeringStatus the status of a VPC peering connection, if applicable
     * @return this object, for call chaining
     */
    public ReferencedSecurityGroup withPeeringStatus(String peeringStatus) {
        this.peeringStatus = peeringStatus;
        return this;
    }

    /** @param userId the Amazon Web Services account ID */
    public void setUserId(String userId) {
        this.userId = userId;
    }

    /** @return the Amazon Web Services account ID */
    public String getUserId() {
        return userId;
    }

    /**
     * Fluent setter for the account ID.
     *
     * @param userId the Amazon Web Services account ID
     * @return this object, for call chaining
     */
    public ReferencedSecurityGroup withUserId(String userId) {
        this.userId = userId;
        return this;
    }

    /** @param vpcId the ID of the VPC */
    public void setVpcId(String vpcId) {
        this.vpcId = vpcId;
    }

    /** @return the ID of the VPC */
    public String getVpcId() {
        return vpcId;
    }

    /**
     * Fluent setter for the VPC ID.
     *
     * @param vpcId the ID of the VPC
     * @return this object, for call chaining
     */
    public ReferencedSecurityGroup withVpcId(String vpcId) {
        this.vpcId = vpcId;
        return this;
    }

    /** @param vpcPeeringConnectionId the ID of the VPC peering connection */
    public void setVpcPeeringConnectionId(String vpcPeeringConnectionId) {
        this.vpcPeeringConnectionId = vpcPeeringConnectionId;
    }

    /** @return the ID of the VPC peering connection */
    public String getVpcPeeringConnectionId() {
        return vpcPeeringConnectionId;
    }

    /**
     * Fluent setter for the VPC peering connection ID.
     *
     * @param vpcPeeringConnectionId the ID of the VPC peering connection
     * @return this object, for call chaining
     */
    public ReferencedSecurityGroup withVpcPeeringConnectionId(String vpcPeeringConnectionId) {
        this.vpcPeeringConnectionId = vpcPeeringConnectionId;
        return this;
    }

    /**
     * Returns a string representation of this object, useful for testing and debugging.
     * Only non-null properties are printed; each printed property except the last
     * declared one is followed by a comma (matching the generated-SDK format exactly).
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getGroupId() != null) {
            sb.append("GroupId: ").append(getGroupId()).append(",");
        }
        if (getPeeringStatus() != null) {
            sb.append("PeeringStatus: ").append(getPeeringStatus()).append(",");
        }
        if (getUserId() != null) {
            sb.append("UserId: ").append(getUserId()).append(",");
        }
        if (getVpcId() != null) {
            sb.append("VpcId: ").append(getVpcId()).append(",");
        }
        if (getVpcPeeringConnectionId() != null) {
            sb.append("VpcPeeringConnectionId: ").append(getVpcPeeringConnectionId());
        }
        return sb.append("}").toString();
    }

    /** Null-safe field comparison: true when both are null or {@code a.equals(b)}. */
    private static boolean eq(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    /**
     * Two instances are equal when all five properties are pairwise equal
     * (null matching only null).
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ReferencedSecurityGroup)) {
            // also covers obj == null
            return false;
        }
        ReferencedSecurityGroup that = (ReferencedSecurityGroup) obj;
        return eq(getGroupId(), that.getGroupId())
                && eq(getPeeringStatus(), that.getPeeringStatus())
                && eq(getUserId(), that.getUserId())
                && eq(getVpcId(), that.getVpcId())
                && eq(getVpcPeeringConnectionId(), that.getVpcPeeringConnectionId());
    }

    /**
     * Standard 31-based accumulation over the five properties in declaration order
     * (null contributes 0) — identical values to the unrolled generated version.
     */
    @Override
    public int hashCode() {
        int result = 1;
        for (Object field : new Object[] {
                getGroupId(), getPeeringStatus(), getUserId(), getVpcId(), getVpcPeeringConnectionId() }) {
            result = 31 * result + (field == null ? 0 : field.hashCode());
        }
        return result;
    }

    /** Shallow copy via {@link Object#clone()}; Strings are immutable so this is safe. */
    @Override
    public ReferencedSecurityGroup clone() {
        try {
            return (ReferencedSecurityGroup) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
package org.openforis.collect.designer.viewmodel; import static org.openforis.collect.designer.viewmodel.SurveyBaseVM.SurveyType.PUBLISHED; import static org.openforis.collect.designer.viewmodel.SurveyBaseVM.SurveyType.TEMPORARY; import static org.openforis.collect.designer.viewmodel.SurveyExportParametersVM.SurveyExportParametersFormObject.OutputFormat.DESKTOP; import static org.openforis.collect.designer.viewmodel.SurveyExportParametersVM.SurveyExportParametersFormObject.OutputFormat.EARTH; import static org.openforis.collect.designer.viewmodel.SurveyExportParametersVM.SurveyExportParametersFormObject.OutputFormat.RDB; import static org.openforis.collect.metamodel.SurveyTarget.COLLECT_EARTH; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.net.URLConnection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.ObjectUtils; import org.openforis.collect.designer.util.MessageUtil; import org.openforis.collect.designer.util.Resources; import org.openforis.collect.designer.util.SuccessHandler; import org.openforis.collect.designer.viewmodel.JobStatusPopUpVM.JobEndHandler; import org.openforis.collect.designer.viewmodel.SurveyBaseVM.SurveyType; import org.openforis.collect.designer.viewmodel.SurveyExportParametersVM.SurveyExportParametersFormObject.OutputFormat; import org.openforis.collect.designer.viewmodel.SurveyValidationResultsVM.ConfirmEvent; import org.openforis.collect.io.SurveyBackupJob; import org.openforis.collect.io.data.DataBackupError; import org.openforis.collect.io.metadata.collectearth.CollectEarthProjectFileCreator; import org.openforis.collect.io.metadata.collectearth.CollectEarthProjectFileCreatorImpl; import org.openforis.collect.manager.CodeListManager; import org.openforis.collect.manager.RecordManager; import 
org.openforis.collect.manager.SurveyManager; import org.openforis.collect.manager.validation.CollectEarthSurveyValidator; import org.openforis.collect.manager.validation.CollectMobileSurveyValidator; import org.openforis.collect.manager.validation.SurveyValidator; import org.openforis.collect.manager.validation.SurveyValidator.SurveyValidationResults; import org.openforis.collect.metamodel.SurveyTarget; import org.openforis.collect.model.CollectSurvey; import org.openforis.collect.model.RecordFilter; import org.openforis.collect.model.SurveySummary; import org.openforis.collect.relational.print.RDBPrintJob; import org.openforis.collect.relational.print.RDBPrintJob.RdbDialect; import org.openforis.collect.utils.Dates; import org.openforis.concurrency.Job; import org.springframework.http.MediaType; import org.zkoss.bind.Form; import org.zkoss.bind.SimpleForm; import org.zkoss.bind.annotation.Command; import org.zkoss.bind.annotation.DependsOn; import org.zkoss.bind.annotation.ExecutionArgParam; import org.zkoss.bind.annotation.Init; import org.zkoss.util.logging.Log; import org.zkoss.util.resource.Labels; import org.zkoss.zk.ui.event.EventListener; import org.zkoss.zk.ui.select.annotation.WireVariable; import org.zkoss.zul.Filedownload; import org.zkoss.zul.Window; /** * * @author S. 
Ricci * */ public class SurveyExportParametersVM extends BaseVM { private static Log LOG = Log.lookup(SurveyExportParametersVM.class); private static final String DEFAULT_DATE_TIME_FORMAT = "yyyy-MM-dd HH:mm:ss"; /** * Pattern for survey export file name (SURVEYNAME_DATE.OUTPUTFORMAT) */ private static final String SURVEY_EXPORT_FILE_NAME_PATTERN = "%s_%s.%s"; private static final String SURVEY_EXPORT_MOBILE_FILE_NAME_PATTERN = "%s_%s_%s.%s"; private static final String COLLECT_EARTH_PROJECT_FILE_EXTENSION = "cep"; private static final CollectEarthProjectFileCreator COLLECT_EARTH_PROJECT_FILE_CREATOR; static { Iterator<CollectEarthProjectFileCreator> it = COLLECT_EARTH_PROJECT_FILE_CREATOR_LOADER.iterator(); COLLECT_EARTH_PROJECT_FILE_CREATOR = it.hasNext() ? it.next(): null; } @WireVariable private SurveyManager surveyManager; @WireVariable private RecordManager recordManager; @WireVariable private CodeListManager codeListManager; @WireVariable private SurveyValidator surveyValidator; @WireVariable private CollectEarthSurveyValidator collectEarthSurveyValidator; @WireVariable private CollectMobileSurveyValidator collectMobileSurveyValidator; private SurveySummary surveySummary; private SurveyExportParametersFormObject formObject; private Form tempForm; private Window jobStatusPopUp; public static void openPopUp(SurveySummary surveySummary) throws IOException { Map<String, Object> args = new HashMap<String, Object>(); args.put("survey", surveySummary); openPopUp(Resources.Component.SURVEY_EXPORT_PARAMETERS_POPUP.getLocation(), true, args); } @Init public void init(@ExecutionArgParam("survey") SurveySummary survey) { this.surveySummary = survey; this.formObject = new SurveyExportParametersFormObject(); String outputFormat = (survey.getTarget() == COLLECT_EARTH ? EARTH : DESKTOP).name(); this.formObject.setOutputFormat(outputFormat); this.formObject.setType((survey.isNotLinkedToPublishedSurvey() ? 
TEMPORARY: PUBLISHED).name()); this.formObject.setRdbDialect(RdbDialect.STANDARD.name()); this.formObject.setRdbDateTimeFormat(DEFAULT_DATE_TIME_FORMAT); this.formObject.setRdbTargetSchemaName(survey.getName()); this.formObject.setLanguageCode(survey.getDefaultLanguage()); this.tempForm = new SimpleForm(); } @Command public void typeChanged() { checkEnabledFields(); } @Command public void outputFormatChanged() { checkEnabledFields(); } @Command public void includeDataChanged() { checkEnabledFields(); } @Command public void export() { String uri = surveySummary.getUri(); final CollectSurvey loadedSurvey; if ( surveySummary.isTemporary() && SurveyType.valueOf(formObject.getType()) == TEMPORARY ) { loadedSurvey = surveyManager.loadSurvey(surveySummary.getId()); } else { loadedSurvey = surveyManager.getByUri(uri); } switch(formObject.getOutputFormatEnum()) { case EARTH: validateSurvey(loadedSurvey, collectEarthSurveyValidator, new SuccessHandler() { public void onSuccess() { exportCollectEarthSurvey(loadedSurvey, formObject); } }, true); return; case RDB: startRDBSurveyExportJob(loadedSurvey, formObject); break; case MOBILE: validateSurvey(loadedSurvey, collectMobileSurveyValidator, new SuccessHandler() { public void onSuccess() { startCollectSurveyExportJob(loadedSurvey, formObject); } }, true); break; default: startCollectSurveyExportJob(loadedSurvey, formObject); break; } } private void downloadFile(File file, String extension, String contentType, CollectSurvey survey, String defaultLanguageCode) { String surveyName = survey.getName(); String dateStr = Dates.formatLocalDateTime(survey.getModifiedDate()); String fileName; if (org.openforis.collect.io.SurveyBackupJob.OutputFormat.MOBILE.getOutputFileExtension().equals(extension)) { fileName = String.format(SURVEY_EXPORT_MOBILE_FILE_NAME_PATTERN, surveyName, defaultLanguageCode, dateStr, extension); } else { fileName = String.format(SURVEY_EXPORT_FILE_NAME_PATTERN, surveyName, dateStr, extension); } try { 
Filedownload.save(new FileInputStream(file), contentType, fileName); } catch (FileNotFoundException e) { LOG.error(e); MessageUtil.showError("survey.export_survey.error", e.getMessage()); } } private void startRDBSurveyExportJob(final CollectSurvey survey, final SurveyExportParametersFormObject parameters) { RDBPrintJob job = new RDBPrintJob(); job.setSurvey(survey); job.setTargetSchemaName(survey.getName()); job.setRecordManager(recordManager); RecordFilter recordFilter = new RecordFilter(survey); job.setRecordFilter(recordFilter); job.setIncludeData(parameters.isIncludeData()); job.setDialect(parameters.getRdbDialectEnum()); job.setDateTimeFormat(parameters.getRdbDateTimeFormat()); job.setTargetSchemaName(parameters.getRdbTargetSchemaName()); jobManager.start(job, String.valueOf(survey.getId())); openJobStatusPopUp(survey.getName(), job, new ExportJobEndHandler<RDBPrintJob>() { @Override protected void onJobCompleted() { File file = job.getOutputFile(); CollectSurvey survey = job.getSurvey(); String extension = "sql"; downloadFile(file, extension, MediaType.TEXT_PLAIN_VALUE, survey, survey.getDefaultLanguage()); super.onJobCompleted(); } }); } private <J extends Job> void openJobStatusPopUp(String surveyName, J job, JobEndHandler<J> jobEndHandler) { String title = Labels.getLabel("survey.export_survey.process_status_popup.message", new String[] { surveyName }); jobStatusPopUp = JobStatusPopUpVM.openPopUp(title, job, true, jobEndHandler); } private void exportCollectEarthSurvey(final CollectSurvey survey, final SurveyExportParametersFormObject parameters) { FileInputStream is = null; try { CollectEarthProjectFileCreatorImpl creatorImpl = (CollectEarthProjectFileCreatorImpl) COLLECT_EARTH_PROJECT_FILE_CREATOR; creatorImpl.setCodeListManager(codeListManager); creatorImpl.setSurveyManager(surveyManager); String languageCode = parameters.getLanguageCode(); File file = COLLECT_EARTH_PROJECT_FILE_CREATOR.create(survey, languageCode); String contentType = 
URLConnection.guessContentTypeFromName(file.getName()); is = new FileInputStream(file); String outputFileName = String.format("%s_%s_%s.%s", survey.getName(), languageCode, Dates.formatLocalDateTime(survey.getModifiedDate()), COLLECT_EARTH_PROJECT_FILE_EXTENSION); Filedownload.save(is, contentType, outputFileName); } catch(Exception e) { LOG.error(e); MessageUtil.showError("survey.export.error_generating_collect_earth_project_file", e.getMessage()); } finally { IOUtils.closeQuietly(is); } } protected void startCollectSurveyExportJob(CollectSurvey survey, SurveyExportParametersFormObject parameters) { SurveyBackupJob job = jobManager.createJob(SurveyBackupJob.class); job.setSurvey(survey); job.setIncludeData(parameters.isIncludeData()); job.setIncludeRecordFiles(parameters.isIncludeUploadedFiles()); job.setOutputFormat(org.openforis.collect.io.SurveyBackupJob.OutputFormat.valueOf(parameters.getOutputFormat())); job.setOutputSurveyDefaultLanguage(ObjectUtils.defaultIfNull(parameters.getLanguageCode(), survey.getDefaultLanguage())); jobManager.start(job, String.valueOf(survey.getId())); openJobStatusPopUp(survey.getName(), job, new ExportJobEndHandler<SurveyBackupJob>() { @Override protected void onJobCompleted() { File file = job.getOutputFile(); downloadFile(file, job.getOutputFormat().getOutputFileExtension(), MediaType.APPLICATION_OCTET_STREAM_VALUE, job.getSurvey(), job.getOutputSurveyDefaultLanguage()); final List<DataBackupError> dataBackupErrors = job.getDataBackupErrors(); if (! 
dataBackupErrors.isEmpty()) { DataExportErrorsPopUpVM.showPopUp(dataBackupErrors); } super.onJobCompleted(); } }); } private class ExportJobEndHandler<J extends Job> implements JobEndHandler<J> { public void onJobEnd(J job) { switch(job.getStatus()) { case COMPLETED: onJobCompleted(); break; case FAILED: MessageUtil.showError("survey.export.error", Labels.getLabel(job.getErrorMessage(), job.getErrorMessageArgs())); break; default: } closeJobStatusPopUp(); } protected void onJobCompleted() { MessageUtil.showInfo("survey.export.completed"); } } private void validateSurvey(CollectSurvey survey, SurveyValidator validator, final SuccessHandler successHandler, boolean showWarningConfirm) { SurveyValidationResults validationResults = validator.validate(survey); if (validationResults.isOk()) { successHandler.onSuccess(); } else { final Window validationResultsPopUp = SurveyValidationResultsVM.showPopUp(validationResults, showWarningConfirm && ! validationResults.hasErrors()); validationResultsPopUp.addEventListener(SurveyValidationResultsVM.CONFIRM_EVENT_NAME, new EventListener<ConfirmEvent>() { public void onEvent(ConfirmEvent event) throws Exception { successHandler.onSuccess(); closePopUp(validationResultsPopUp); } }); } } protected void closeJobStatusPopUp() { closePopUp(jobStatusPopUp); jobStatusPopUp = null; } @DependsOn({"tempForm.type","tempForm.outputFormat"}) public boolean isIncludeDataVisible() { SurveyType type = SurveyType.valueOf(getTypeFormField()); OutputFormat outputFormat = OutputFormat.valueOf(getOutputFormatFormField()); return type == PUBLISHED && outputFormat == RDB; } public boolean isCollectEarthSurvey() { return surveySummary != null && surveySummary.getTarget() == SurveyTarget.COLLECT_EARTH; } public SurveyExportParametersFormObject getFormObject() { return formObject; } public void setFormObject(SurveyExportParametersFormObject formObject) { this.formObject = formObject; } public SurveySummary getSurvey() { return surveySummary; } public Form 
getTempForm() { return tempForm; } public void setTempForm(Form tempForm) { this.tempForm = tempForm; } private void checkEnabledFields() { if ( isIncludeDataVisible() ) { boolean includeData = getFormFieldValue(tempForm, "includeData"); if ( ! includeData ) { setFormFieldValue(tempForm, "includeUploadedFiles", false); } } else { setFormFieldValue(tempForm, "includeData", false); } } private String getOutputFormatFormField() { return getFormFieldValue(tempForm, "outputFormat"); } private String getTypeFormField() { return getFormFieldValue(tempForm, "type"); } public List<String> getSurveyLanguages() { return surveySummary.getLanguages(); } public static class SurveyExportParametersFormObject { public enum OutputFormat { MOBILE, DESKTOP, RDB, EARTH } private String type; private boolean includeData; private boolean includeUploadedFiles; private String outputFormat; private String rdbDialect; private String rdbDateTimeFormat; private String rdbTargetSchemaName; private String languageCode; public String getType() { return type; } public SurveyType getTypeEnum() { return SurveyType.valueOf(type); } public void setType(String type) { this.type = type; } public boolean isIncludeData() { return includeData; } public void setIncludeData(boolean includeData) { this.includeData = includeData; } public boolean isIncludeUploadedFiles() { return includeUploadedFiles; } public void setIncludeUploadedFiles(boolean includeUploadedFiles) { this.includeUploadedFiles = includeUploadedFiles; } public String getOutputFormat() { return outputFormat; } public OutputFormat getOutputFormatEnum() { return OutputFormat.valueOf(outputFormat); } public void setOutputFormat(String outputFormat) { this.outputFormat = outputFormat; } public String getRdbDialect() { return rdbDialect; } public RdbDialect getRdbDialectEnum() { return RdbDialect.valueOf(rdbDialect); } public void setRdbDialect(String rdbDialect) { this.rdbDialect = rdbDialect; } public String getRdbDateTimeFormat() { return 
rdbDateTimeFormat; } public void setRdbDateTimeFormat(String rdbDateTimeFormat) { this.rdbDateTimeFormat = rdbDateTimeFormat; } public String getRdbTargetSchemaName() { return rdbTargetSchemaName; } public void setRdbTargetSchemaName(String rdbTargetSchemaName) { this.rdbTargetSchemaName = rdbTargetSchemaName; } public String getLanguageCode() { return languageCode; } public void setLanguageCode(String languageCode) { this.languageCode = languageCode; } } }
/* * "Copyright (c) 2014 Capgemini Technology Services (hereinafter "Capgemini") * * License/Terms of Use * Permission is hereby granted, free of charge and for the term of intellectual * property rights on the Software, to any person obtaining a copy of this software * and associated documentation files (the "Software"), to use, copy, modify and * propagate free of charge, anywhere in the world, all or part of the Software * subject to the following mandatory conditions: * * - The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * Any failure to comply with the above shall automatically terminate the license * and be construed as a breach of these Terms of Use causing significant harm to * Capgemini. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, PEACEFUL ENJOYMENT, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS * OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * * Except as contained in this notice, the name of Capgemini shall not be used in * advertising or otherwise to promote the use or other dealings in this Software * without prior written authorization from Capgemini. * * These Terms of Use are subject to French law. * * IMPORTANT NOTICE: The WUIC software implements software components governed by * open source software licenses (BSD and Apache) of which CAPGEMINI is not the * author or the editor. The rights granted on the said software components are * governed by the specific terms and conditions specified by Apache 2.0 and BSD * licenses." 
*/ package com.github.wuic.engine.impl.embedded; import com.github.wuic.NutType; import com.github.wuic.engine.*; import com.github.wuic.exception.WuicException; import com.github.wuic.exception.wrapper.BadArgumentException; import com.github.wuic.exception.wrapper.StreamException; import com.github.wuic.nut.*; import com.github.wuic.nut.core.ByteArrayNut; import com.github.wuic.nut.core.ProxyNutDao; import com.github.wuic.nut.filter.NutFilter; import com.github.wuic.util.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.math.BigInteger; import java.security.MessageDigest; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * <p> * This engine is able to parse an HTML to collect all CSS and JS scripts, refer them through a heap and set * reference to associated workflow URL in DOM. * </p> * * <p> * Some points have to be notified: * </p> * * <ul> * <li>Observing collected nuts is not possible here so polling won't be perform to invalidate any cache.</li> * <li>Version number is based on content hash because this is the unique strategy that applies to inline scripts</li> * </ul> * * * @author Guillaume DROUET * @version 1.0 * @since 0.4.4 */ public class HtmlInspectorEngine extends NodeEngine { /** * We use a specific parser for each matched group. Store them in a TreeMap to ensure that lower groups will be tested before higher. */ private static final Map<Integer, BiFunction<String, ProxyNutDao, String>> PARSERS = new TreeMap<Integer, BiFunction<String, ProxyNutDao, String>>() { { put(NumberUtils.SIX, new HrefParser()); } { put(NumberUtils.FOURTEEN, new DefaultParser()); } { put(1, new JsParser()); } { put(NumberUtils.THIRTEEN, new CssParser()); } }; /** * Engines types that will be skipped when processing referenced nuts. 
*/ private static final EngineType[] SKIPPED_ENGINE = new EngineType[] { EngineType.CACHE, }; /** * Regex that matches JS script import or JS declaration. */ private static final String JS_SCRIPT_PATTERN = String.format("(<%1$s.*?(%2$s=)?(([^>]*>[^<]*</%1$s>)|([^/]*/>)))", "script", "src"); /** * Regex that matches CSS script import. */ private static final String HREF_SCRIPT_PATTERN = String.format("(<%1$s.*?(%2$s=)?(([^>]*>)(([^<]*</%1$s>)|([^/]*/>))?))", "link", "href"); /** * Regex that matches CSS declaration. */ private static final String CSS_SCRIPT_PATTERN = "(<style>.*?</style>)"; /** * Regex that matches HTML comment. */ private static final String HTML_COMMENT_PATTERN = "(<!--.*?-->)"; /** * The entire regex that collects desired data. */ private static final String REGEX = String.format("%s|%s|%s|%s", JS_SCRIPT_PATTERN, HREF_SCRIPT_PATTERN, CSS_SCRIPT_PATTERN, HTML_COMMENT_PATTERN); /** * The pattern that collects desired data */ private static final Pattern PATTERN = Pattern.compile(REGEX, Pattern.CASE_INSENSITIVE | Pattern.DOTALL); /** * The logger. */ private final Logger logger = LoggerFactory.getLogger(getClass()); /** * The applied filters */ private final List<NutFilter> nutFilters; /** * Inspects or not. */ private Boolean doInspection; /** * The charset of inspected file. */ private String charset; /** * <p> * Builds a new instance. * </p> * * @param inspect activate inspection or not * @param cs files charset * @param filters the nut filters to apply */ public HtmlInspectorEngine(final List<NutFilter> filters, final Boolean inspect, final String cs) { doInspection = inspect; charset = cs; nutFilters = filters; } /** * {@inheritDoc} */ @Override public List<Nut> internalParse(final EngineRequest request) throws WuicException { // Will contains both heap's nuts eventually modified or extracted nuts. 
final List<Nut> retval = new ArrayList<Nut>(); if (works()) { for (final Nut nut : request.getNuts()) { try { retval.add(transformHtml(nut, request.getContextPath(), request)); } catch (IOException ioe) { throw new StreamException(ioe); } } } if (getNext() != null) { return getNext().parse(new EngineRequest(retval, request)); } else { return retval; } } /** * <p> * This abstract class is a base for script references. * </p> * * @author Guillaume DROUET * @version 1.0 * @since 0.4.4 */ private abstract static class ScriptParser implements BiFunction<String, ProxyNutDao, String> { /** * <p> * Returns the token that refers the URL. * </p> * * @return the token */ protected abstract String urlToken(); /** * <p> * Returns the {@link NutType} of created nut. * </p> * * @return the nut type */ protected abstract NutType getNutType(); /** * <p> * Indicates if the parser should tries to read ant inline content. * </p> * @return {@link true} if inline content needs to be read, {@code false} otherwise */ protected abstract Boolean readInlineIfTokenNotFound(); /** * {@inheritDoc} */ @Override public String apply(final String s, final ProxyNutDao proxy) { if (s.contains("data-wuic-skip")) { return null; } final String token = urlToken(); int index = token.isEmpty() ? 
-1 : s.indexOf(token) + token.length(); if (index != token.length() - 1) { char c = s.charAt(index); final String retval; if (c == '\'' || c == '"') { retval = s.substring(index + 1, s.indexOf(c, index + 1)); } else { int end = s.indexOf(' ', index); // No space, search /> if (end == -1) { end = s.indexOf('>', index); if (s.charAt(end - 1) == '/') { end--; } } retval = s.substring(index, end); } if (retval.startsWith("http://") || retval.startsWith("https://")) { return null; } else { return retval; } } else if (readInlineIfTokenNotFound()) { // Looking for content final int start = s.indexOf('>') + 1; final int end = s.indexOf('<', start - 1); final byte[] content = s.substring(start, end).getBytes(); // Sign content final MessageDigest md = IOUtils.newMessageDigest(); md.update(content); final BigInteger id = new BigInteger(md.digest()); // Create nut final NutType nt = getNutType(); final String retval = String.format("%s%s", id.toString(NumberUtils.SIXTEEN), nt.getExtensions()[0]); proxy.addRule(retval, new ByteArrayNut(content, retval, nt, id)); return retval; } else { return null; } } } /** * <p> * This class parses links to JS scripts and inline JS scripts. * </p> * * @author Guillaume DROUET * @version 1.0 * @since 0.4.4 */ private static class JsParser extends ScriptParser { /** * {@inheritDoc} */ @Override public String urlToken() { return "src="; } /** * {@inheritDoc} */ @Override public Boolean readInlineIfTokenNotFound() { return Boolean.TRUE; } /** * {@inheritDoc} */ @Override public NutType getNutType() { return NutType.JAVASCRIPT; } } /** * <p> * This class parses links to CSS. 
* </p> * * @author Guillaume DROUET * @version 1.0 * @since 0.4.4 */ private static class HrefParser extends ScriptParser { /** * {@inheritDoc} */ @Override public String urlToken() { return "href="; } /** * {@inheritDoc} */ @Override public Boolean readInlineIfTokenNotFound() { return Boolean.FALSE; } /** * {@inheritDoc} */ @Override public NutType getNutType() { return NutType.CSS; } } /** * <p> * This class parses inline CSS. * </p> * * @author Guillaume DROUET * @version 1.0 * @since 0.4.4 */ private static class CssParser extends ScriptParser { /** * {@inheritDoc} */ @Override public String urlToken() { return ""; } /** * {@inheritDoc} */ @Override public Boolean readInlineIfTokenNotFound() { return Boolean.TRUE; } /** * {@inheritDoc} */ @Override public NutType getNutType() { return NutType.CSS; } } /** * <p> * This class returns always {@link null}, telling the caller to not replace the parsed {@code String}. * </p> * * @author Guillaume DROUET * @version 1.0 * @since 0.4.4 */ private static class DefaultParser implements BiFunction<String, ProxyNutDao, String> { /** * {@inheritDoc} */ @Override public String apply(final String s, final ProxyNutDao proxyNutDao) { return null; } } /** * <p> * Provides information from collected data in a HTML content. * </p> * * @author Guillaume DROUET * @version 1.0 * @since 0.4.4 */ public final class ParseInfo { /** * The heap generated during collect. */ private NutsHeap heap; /** * Collected statements. */ private List<String> capturedStatements; /** * <p> * Builds a new instance. 
* </p> * * @param groups captured statements * @param proxyNutDao the DAO to use when computing path from collected data * @param rootPath the root path of content * @throws StreamException if any I/O error occurs */ private ParseInfo(final Map<String, Integer> groups, final ProxyNutDao proxyNutDao, final String rootPath) throws StreamException { final String[] groupPaths = new String[groups.size()]; this.capturedStatements = new ArrayList<String>(groups.keySet()); int cpt = 0; // Gets the appropriate parser for each captured group according to their position and compute path for (final Map.Entry<String, Integer> entry : groups.entrySet()) { final String path = PARSERS.get(entry.getValue()).apply(entry.getKey(), proxyNutDao); // Path is null, do not replace anything if (path != null) { final String simplify = rootPath.isEmpty() ? path : IOUtils.mergePath(rootPath, path); final String simplified = StringUtils.simplifyPathWithDoubleDot(simplify); if (simplified == null) { throw new BadArgumentException(new IllegalArgumentException(String.format("%s does not represents a reachable path", simplify))); } groupPaths[cpt++] = simplified; } else { this.capturedStatements.remove(entry.getKey()); } } // No info have been collected if (cpt == 0) { return; } // All paths computed from captured statements. final String[] paths = new String[cpt]; System.arraycopy(groupPaths, 0, paths, 0, cpt); List<String> filteredPath = CollectionUtils.newList(paths); for (final NutFilter filter : nutFilters) { filteredPath = filter.filterPaths(filteredPath); } final byte[] hash = IOUtils.digest(filteredPath.toArray(new String[filteredPath.size()])); final String heapId = new BigInteger(hash).toString(NumberUtils.SIXTEEN); heap = new NutsHeap(filteredPath, proxyNutDao, heapId); } /** * <p> * Gets the captured statement by parsing. * </p> * * @return the statements */ public List<String> getCapturedStatements() { return capturedStatements; } /** * <p> * Returns the computed heap. 
* </p> * * @return the heap */ public NutsHeap getHeap() { return heap; } /** * <p> * Add this info to the given list if and only if some statements have been captured * </p> * * @param list the list */ public void addTo(final List<ParseInfo> list) { if (!capturedStatements.isEmpty()) { list.add(this); } } } /** * <p> * Parses the given HTML content and returns all the information collected during the operation. * </p> * * <p> * When a script is referenced, the given {@link NutDao} will be used to retrieve the corresponding {@link Nut}. * </p> * * @param content the HTML content to parse * @param dao the DAO * @return the parsed HTML * @throws WuicException if WUIC fails to configure context or process created workflow */ private List<ParseInfo> parse(final String content, final NutDao dao, final String rootPath) throws WuicException { // Create a proxy that maps inline scripts final ProxyNutDao proxy = new ProxyNutDao(rootPath, dao); // Create the matcher from the given content, we will keep in an integer the end position of group that previously matched final Matcher matcher = PATTERN.matcher(content); int previousGroupEnd = -1; // All the paths we have currently collected final Map<String, Integer> paths = new LinkedHashMap<String, Integer>(); final List<ParseInfo> retval = new ArrayList<ParseInfo>(); // Finds desired groups while (matcher.find()) { // There is something to parse if (!matcher.group().trim().isEmpty()) { /* * We've already matched some scripts and there is something (excluding comment and whitespace) between * the previous script and the script currently matched that implies that they should not be imported * together. Consequently, we create here a separate heap. 
*/ if (previousGroupEnd != -1 && !content.substring(previousGroupEnd + 1, matcher.start()).trim().isEmpty()) { new ParseInfo(paths, proxy, rootPath).addTo(retval); paths.clear(); } // Now find the appropriate parser for (final Integer groupPosition : PARSERS.keySet()) { // Test value at the associated group position to find the appropriate parser if (matcher.group(groupPosition) != null) { paths.put(matcher.group(), groupPosition); break; } } previousGroupEnd = matcher.end(); } } // Create a heap for remaining paths if (!paths.isEmpty()) { new ParseInfo(paths, proxy, rootPath).addTo(retval); } return retval; } /** * <p> * Transforms the given HTML content and returns the replacements done with the collected parse information. * </p> * * @param nut the HTML content to parse * @param request the request * @return the nut wrapping parsed HTML * @throws WuicException if WUIC fails to configure context or process created workflow */ public Nut transformHtml(final Nut nut, final String contextPath, final EngineRequest request) throws WuicException, IOException { final long now = System.currentTimeMillis(); InputStream is = null; final String content; try { is = nut.openStream(); content = IOUtils.readString(new InputStreamReader(is, charset)); } finally { IOUtils.close(is); } final int endParent = nut.getName().lastIndexOf('/'); final String rootPath = endParent == -1 ? 
"" : nut.getName().substring(0, endParent); final List<ParseInfo> parseInfoList = parse(content, request.getHeap().findDaoFor(nut), rootPath); final StringBuilder transform = new StringBuilder(content); int end = 0; final List<Nut> referenced = new ArrayList<Nut>(); // A workflow have been created for each heap for (final ParseInfo parseInfo : parseInfoList) { // Render HTML for workflow result final StringBuilder html = new StringBuilder(); final EngineRequest parseRequest = new EngineRequest(parseInfo.getHeap().getNuts(), parseInfo.getHeap(), request, SKIPPED_ENGINE); final List<Nut> merged = HeadEngine.runChains(parseRequest, Boolean.FALSE); for (final Nut n : merged) { try { // Just add the heap ID as prefix to refer many nuts with same name but from different heaps final Nut renamed = new PrefixedNut(n, parseInfo.getHeap().getId(), Boolean.FALSE); referenced.add(renamed); html.append(HtmlUtil.writeScriptImport(renamed, IOUtils.mergePath(contextPath, request.getWorkflowId()))).append("\r\n"); } catch (IOException ioe) { throw new StreamException(ioe); } } // Replace all captured statements with HTML generated from WUIC process for (int i = 0; i < parseInfo.getCapturedStatements().size(); i++) { final String toReplace = parseInfo.getCapturedStatements().get(i); int start = transform.indexOf(toReplace, end); end = start + toReplace.length(); // Add the WUIC result in place of the first statement if (i == 0) { final String replacement = html.toString(); transform.replace(start, end, replacement); end = start + replacement.length(); } else { transform.replace(start, end, ""); end = start; } } } final Nut retval = new ByteArrayNut(transform.toString().getBytes(), nut.getName(), nut.getNutType(), nut.getVersionNumber()); for (final Nut ref : referenced) { retval.addReferencedNut(ref); } logger.info("HTML transformation in {}ms", System.currentTimeMillis() - now); return retval; } /** * {@inheritDoc} */ @Override public List<NutType> getNutTypes() { return 
Arrays.asList(NutType.HTML); } /** * {@inheritDoc} */ @Override public EngineType getEngineType() { return EngineType.INSPECTOR; } /** * {@inheritDoc} */ @Override public Boolean works() { return doInspection; } }
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/datacatalog/v1beta1/datacatalog.proto
//
// NOTE(review): this file is protoc output. Any manual change here will be
// overwritten on the next code generation run; edit the .proto source instead.

package com.google.cloud.datacatalog.v1beta1;

/**
 * <pre>
 * Request message for
 * [GetTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.GetTagTemplate].
 * </pre>
 *
 * Protobuf type {@code google.cloud.datacatalog.v1beta1.GetTagTemplateRequest}
 */
public final class GetTagTemplateRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1beta1.GetTagTemplateRequest)
    GetTagTemplateRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use GetTagTemplateRequest.newBuilder() to construct.
  private GetTagTemplateRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance constructor; initializes the single field to its proto3 default.
  private GetTagTemplateRequest() {
    name_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new GetTagTemplateRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor invoked via PARSER.parsePartialFrom.
  // Fields this message does not recognize are preserved in unknownFields
  // rather than dropped.
  private GetTagTemplateRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks end of input.
            done = true;
            break;
          case 10:
            {
              // Tag 10 = (field 1 << 3) | wire type 2: the length-delimited
              // "name" field, required to be valid UTF-8.
              java.lang.String s = input.readStringRequireUtf8();

              name_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.datacatalog.v1beta1.Datacatalog
        .internal_static_google_cloud_datacatalog_v1beta1_GetTagTemplateRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.datacatalog.v1beta1.Datacatalog
        .internal_static_google_cloud_datacatalog_v1beta1_GetTagTemplateRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest.class,
            com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest.Builder.class);
  }

  public static final int NAME_FIELD_NUMBER = 1;

  // Holds either a java.lang.String or a com.google.protobuf.ByteString;
  // getName()/getNameBytes() convert lazily and cache the converted form.
  private volatile java.lang.Object name_;

  /**
   * <pre>
   * Required. The name of the tag template. Example:
   *
   * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls skip the UTF-8 decode.
      name_ = s;
      return s;
    }
  }

  /**
   * <pre>
   * Required. The name of the tag template. Example:
   *
   * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString form.
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 default values (empty string) are not serialized.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest other =
        (com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest) obj;

    if (!getName().equals(other.getName())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 is treated as "not yet computed".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * <pre>
   * Request message for
   * [GetTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.GetTagTemplate].
   * </pre>
   *
   * Protobuf type {@code google.cloud.datacatalog.v1beta1.GetTagTemplateRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1beta1.GetTagTemplateRequest)
      com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.datacatalog.v1beta1.Datacatalog
          .internal_static_google_cloud_datacatalog_v1beta1_GetTagTemplateRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.datacatalog.v1beta1.Datacatalog
          .internal_static_google_cloud_datacatalog_v1beta1_GetTagTemplateRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest.class,
              com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest.Builder.class);
    }

    // Construct using com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // No nested message fields, so there are no field builders to initialize.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      name_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.datacatalog.v1beta1.Datacatalog
          .internal_static_google_cloud_datacatalog_v1beta1_GetTagTemplateRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest getDefaultInstanceForType() {
      return com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest build() {
      com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest buildPartial() {
      com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest result =
          new com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest(this);
      result.name_ = name_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest) {
        return mergeFrom((com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest other) {
      if (other == com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest.getDefaultInstance())
        return this;
      // Proto3 merge semantics: only non-default (non-empty) fields overwrite.
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure, then rethrow as IOException.
        parsedMessage =
            (com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    // See the message-level name_ field: String or ByteString, converted lazily.
    private java.lang.Object name_ = "";

    /**
     * <pre>
     * Required. The name of the tag template. Example:
     *
     * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * <pre>
     * Required. The name of the tag template. Example:
     *
     * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * <pre>
     * Required. The name of the tag template. Example:
     *
     * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      name_ = value;
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Required. The name of the tag template. Example:
     *
     * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {

      name_ = getDefaultInstance().getName();
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Required. The name of the tag template. Example:
     *
     * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      name_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1beta1.GetTagTemplateRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.GetTagTemplateRequest)
  private static final com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest();
  }

  public static com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<GetTagTemplateRequest> PARSER =
      new com.google.protobuf.AbstractParser<GetTagTemplateRequest>() {
        @java.lang.Override
        public GetTagTemplateRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new GetTagTemplateRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<GetTagTemplateRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<GetTagTemplateRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
package tv.savageboy74.savagetech.items.lootbag;

/*
 * InventoryLootBag.java
 * Copyright (C) 2016 Savage - github.com/savageboy74
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.util.text.ITextComponent;
import tv.savageboy74.savagetech.util.helper.NBTHelper;
import tv.savageboy74.savagetech.util.reference.Names;

import java.util.UUID;

/**
 * Item-backed inventory for a loot bag. The slot contents are persisted in the
 * NBT tag of the {@link ItemStack} that represents the bag, and the owning
 * stack is re-located in the player's inventory via a UUID pair stored in the
 * same tag.
 */
public class InventoryLootBag implements IInventory
{
    /** Backing slot array; a {@code null} entry means an empty slot. */
    public ItemStack[] inventory;

    /** Optional custom display name read from the item's "display" NBT tag. */
    protected String customName;

    /** The loot-bag ItemStack this inventory is loaded from and saved to. */
    public ItemStack parentItemStack;

    /**
     * Builds the inventory from the given bag stack, sizing it from the
     * container's row/column constants and loading any previously saved
     * contents from the stack's NBT (a {@code null} tag is tolerated).
     *
     * @param stack the loot-bag item stack backing this inventory
     */
    public InventoryLootBag(ItemStack stack)
    {
        parentItemStack = stack;

        int size = ContainerLootBag.BAG_INVENTORY_ROWS * ContainerLootBag.BAG_INVENTORY_COLUMNS;
        inventory = new ItemStack[size];

        readFromNBT(stack.getTagCompound());
    }

    /**
     * Called when the GUI closes: re-resolves the parent stack in the player's
     * inventory (it may have moved slots while the GUI was open) and persists
     * the current contents to it.
     *
     * @param player the player whose inventory holds the bag
     */
    public void onGuiSaved(EntityPlayer player)
    {
        parentItemStack = findParentItemStack(player);

        if (parentItemStack != null) {
            save();
        }
    }

    /**
     * Writes this inventory into the parent stack's NBT. If the stack has no
     * tag yet, a fresh one is created and stamped with a random UUID so the
     * stack can be found again later (see {@link #findParentItemStack}).
     */
    public void save()
    {
        NBTTagCompound nbtTagCompound = parentItemStack.getTagCompound();

        if (nbtTagCompound == null) {
            nbtTagCompound = new NBTTagCompound();

            UUID uuid = UUID.randomUUID();
            nbtTagCompound.setLong(Names.NBT.UUID_MOST_SIG, uuid.getMostSignificantBits());
            nbtTagCompound.setLong(Names.NBT.UUID_LEAST_SIG, uuid.getLeastSignificantBits());
        }

        writeToNBT(nbtTagCompound);
        parentItemStack.setTagCompound(nbtTagCompound);
    }

    /**
     * Scans the player's inventory for the stack whose NBT UUID matches the
     * one recorded on {@link #parentItemStack}.
     *
     * @param entityPlayer the player to search
     * @return the matching stack, or {@code null} if the parent stack has no
     *         UUID or no stack in the player's inventory matches it
     */
    public ItemStack findParentItemStack(EntityPlayer entityPlayer)
    {
        if (NBTHelper.hasUUID(parentItemStack)) {
            UUID parentItemStackUUID = new UUID(
                    parentItemStack.getTagCompound().getLong(Names.NBT.UUID_MOST_SIG),
                    parentItemStack.getTagCompound().getLong(Names.NBT.UUID_LEAST_SIG));

            for (int i = 0; i < entityPlayer.inventory.getSizeInventory(); i++) {
                ItemStack itemStack = entityPlayer.inventory.getStackInSlot(i);

                if (NBTHelper.hasUUID(itemStack)) {
                    if (itemStack.getTagCompound().getLong(Names.NBT.UUID_MOST_SIG) == parentItemStackUUID.getMostSignificantBits()
                            && itemStack.getTagCompound().getLong(Names.NBT.UUID_LEAST_SIG) == parentItemStackUUID.getLeastSignificantBits()) {
                        return itemStack;
                    }
                }
            }
        }

        return null;
    }

    /**
     * Restores the slot contents (and the optional custom name) from NBT.
     * A {@code null} tag or a tag without the items list leaves the inventory
     * untouched. NOTE(review): the custom name is only read when the items
     * list is present, matching the original behavior.
     *
     * @param nbtTagCompound the tag to read from; may be {@code null}
     */
    public void readFromNBT(NBTTagCompound nbtTagCompound)
    {
        // Fixed: the original re-tested hasKey(Names.NBT.ITEMS) inside a branch
        // already guarded by the identical condition; the duplicate check is gone.
        if (nbtTagCompound != null && nbtTagCompound.hasKey(Names.NBT.ITEMS)) {
            // 10 is the NBT type id for a compound tag: the list holds one
            // compound per occupied slot.
            NBTTagList tagList = nbtTagCompound.getTagList(Names.NBT.ITEMS, 10);
            inventory = new ItemStack[this.getSizeInventory()];

            for (int i = 0; i < tagList.tagCount(); ++i) {
                NBTTagCompound tagCompound = tagList.getCompoundTagAt(i);
                byte slotIndex = tagCompound.getByte("Slot");

                // Ignore entries whose slot index no longer fits the inventory.
                if (slotIndex >= 0 && slotIndex < inventory.length) {
                    inventory[slotIndex] = ItemStack.loadItemStackFromNBT(tagCompound);
                }
            }

            if (nbtTagCompound.hasKey("display") && nbtTagCompound.getTag("display").getClass().equals(NBTTagCompound.class)) {
                if (nbtTagCompound.getCompoundTag("display").hasKey("Name")) {
                    customName = nbtTagCompound.getCompoundTag("display").getString("Name");
                }
            }
        }
    }

    /**
     * Serializes the occupied slots into the given tag under
     * {@code Names.NBT.ITEMS}, one compound (with a "Slot" byte) per stack.
     *
     * @param nbtTagCompound the tag to write into; must not be {@code null}
     */
    public void writeToNBT(NBTTagCompound nbtTagCompound)
    {
        NBTTagList tagList = new NBTTagList();

        for (int currentIndex = 0; currentIndex < inventory.length; ++currentIndex) {
            if (inventory[currentIndex] != null) {
                NBTTagCompound tagCompound = new NBTTagCompound();
                tagCompound.setByte("Slot", (byte) currentIndex);
                inventory[currentIndex].writeToNBT(tagCompound);
                tagList.appendTag(tagCompound);
            }
        }

        nbtTagCompound.setTag(Names.NBT.ITEMS, tagList);
    }

    @Override
    public int getSizeInventory()
    {
        return inventory.length;
    }

    @Override
    public ItemStack getStackInSlot(int slotIndex)
    {
        return inventory[slotIndex];
    }

    /**
     * Removes up to {@code decrementAmount} items from the slot. If the stack
     * is consumed entirely the slot is cleared and the whole stack returned;
     * otherwise the split-off portion is returned.
     */
    @Override
    public ItemStack decrStackSize(int slotIndex, int decrementAmount)
    {
        ItemStack itemStack = getStackInSlot(slotIndex);

        if (itemStack != null) {
            if (itemStack.stackSize <= decrementAmount) {
                setInventorySlotContents(slotIndex, null);
            } else {
                itemStack = itemStack.splitStack(decrementAmount);
                if (itemStack.stackSize == 0) {
                    setInventorySlotContents(slotIndex, null);
                }
            }
        }

        return itemStack;
    }

    @Override
    public ItemStack removeStackFromSlot(int slotIndex)
    {
        if (inventory[slotIndex] != null) {
            ItemStack itemStack = inventory[slotIndex];
            inventory[slotIndex] = null;
            return itemStack;
        } else {
            return null;
        }
    }

    @Override
    public void setInventorySlotContents(int slotIndex, ItemStack stack)
    {
        inventory[slotIndex] = stack;
    }

    @Override
    public int getInventoryStackLimit()
    {
        return 64;
    }

    @Override
    public void markDirty()
    {
        // Persistence is handled explicitly via onGuiSaved()/save().
    }

    @Override
    public boolean isUseableByPlayer(EntityPlayer player)
    {
        return true;
    }

    @Override
    public void openInventory(EntityPlayer player)
    {
        // No open-time behavior.
    }

    @Override
    public void closeInventory(EntityPlayer player)
    {
        // No close-time behavior; the container triggers onGuiSaved().
    }

    @Override
    public boolean isItemValidForSlot(int index, ItemStack stack)
    {
        // Players cannot insert items into a loot bag; it is read-only storage.
        return false;
    }

    @Override
    public int getField(int id)
    {
        return 0;
    }

    @Override
    public void setField(int id, int value)
    {
        // No synced fields.
    }

    @Override
    public int getFieldCount()
    {
        return 0;
    }

    @Override
    public void clear()
    {
        // Intentionally a no-op in the original implementation.
    }

    @Override
    public String getName()
    {
        return this.hasCustomName() ? this.getCustomName() : Names.Containers.LOOT_BAG;
    }

    @Override
    public boolean hasCustomName()
    {
        return customName != null && customName.length() > 0;
    }

    @Override
    public ITextComponent getDisplayName()
    {
        return null;
    }

    /** @return the custom display name, or {@code null} if none was set */
    public String getCustomName()
    {
        return customName;
    }
}
/*
 * Copyright 2014 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.navercorp.pinpoint.bootstrap;

import java.io.File;
import java.io.FileFilter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.jar.JarFile;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Resolves the pinpoint agent directory layout (boot jars, {@code lib/}, {@code plugin/},
 * {@code pinpoint.config}) from the JVM class path entry containing the pinpoint-bootstrap jar.
 *
 * @author emeroad
 */
public class AgentDirBaseClassPathResolver implements ClassPathResolver {

    private final BootLogger logger = BootLogger.getLogger(this.getClass().getName());

    // Matches an optional "-x.y.z", "-x.y.z-SNAPSHOT" or "-x.y.z-RC<n>" version suffix.
    static final String VERSION_PATTERN = "(-[0-9]+\\.[0-9]+\\.[0-9]+((\\-SNAPSHOT)|(-RC[0-9]+))?)?";
    static final Pattern DEFAULT_AGENT_PATTERN = compile("pinpoint-bootstrap" + VERSION_PATTERN + "\\.jar");
    static final Pattern DEFAULT_AGENT_COMMONS_PATTERN = compile("pinpoint-commons" + VERSION_PATTERN + "\\.jar");
    static final Pattern DEFAULT_AGENT_CORE_PATTERN = compile("pinpoint-bootstrap-core" + VERSION_PATTERN + "\\.jar");
    static final Pattern DEFAULT_AGENT_CORE_OPTIONAL_PATTERN = compile("pinpoint-bootstrap-core-optional" + VERSION_PATTERN + "\\.jar");
    static final Pattern DEFAULT_ANNOTATIONS = compile("pinpoint-annotations" + VERSION_PATTERN + "\\.jar");

    private final Pattern agentPattern;
    private final Pattern agentCommonsPattern;
    private final Pattern agentCorePattern;
    private final Pattern agentCoreOptionalPattern;
    private final Pattern annotationsPattern;

    private String classPath;

    private String agentJarName;
    private String agentJarFullPath;
    private String agentDirPath;

    private List<String> fileExtensionList;
    private String pinpointCommonsJar;
    private String bootStrapCoreJar;
    private String bootStrapCoreOptionalJar;
    private String annotationsJar;

    private BootstrapJarFile bootstrapJarFile;

    private static Pattern compile(String regex) {
        return Pattern.compile(regex);
    }

    /** Resolves against the JVM's {@code java.class.path} system property. */
    public AgentDirBaseClassPathResolver() {
        this(getClassPathFromSystemProperty());
    }

    /**
     * Resolves against an explicit class path string using the default jar-name patterns.
     *
     * @param classPath class path to scan for the bootstrap jar
     */
    public AgentDirBaseClassPathResolver(String classPath) {
        this.classPath = classPath;
        this.agentPattern = DEFAULT_AGENT_PATTERN;
        this.agentCommonsPattern = DEFAULT_AGENT_COMMONS_PATTERN;
        this.agentCorePattern = DEFAULT_AGENT_CORE_PATTERN;
        this.agentCoreOptionalPattern = DEFAULT_AGENT_CORE_OPTIONAL_PATTERN;
        this.annotationsPattern = DEFAULT_ANNOTATIONS;
        this.fileExtensionList = getDefaultFileExtensionList();
    }

    /** File extensions that {@link #resolveLib()} accepts from the agent lib directory. */
    public List<String> getDefaultFileExtensionList() {
        List<String> extensionList = new ArrayList<String>();
        extensionList.add("jar");
        extensionList.add("xml");
        extensionList.add("properties");
        return extensionList;
    }

    /**
     * Resolves against an explicit class path with a custom bootstrap-jar name pattern.
     *
     * @param classPath    class path to scan
     * @param agentPattern regex matched against the class path to locate the bootstrap jar
     */
    public AgentDirBaseClassPathResolver(String classPath, String agentPattern) {
        this.classPath = classPath;
        this.agentPattern = Pattern.compile(agentPattern);
        this.agentCommonsPattern = DEFAULT_AGENT_COMMONS_PATTERN;
        this.agentCorePattern = DEFAULT_AGENT_CORE_PATTERN;
        this.agentCoreOptionalPattern = DEFAULT_AGENT_CORE_OPTIONAL_PATTERN;
        this.annotationsPattern = DEFAULT_ANNOTATIONS;
        this.fileExtensionList = getDefaultFileExtensionList();
    }

    /**
     * Locates all required boot jars and opens them, populating {@link #getBootstrapJarFile()}.
     * The commons and bootstrap-core jars are mandatory; the core-optional and annotations jars
     * are optional and merely logged when absent.
     *
     * @return true when all mandatory jars were found and opened
     */
    @Override
    public boolean verify() {
        final BootstrapJarFile bootstrapJarFile = new BootstrapJarFile();

        // 1st find boot-strap.jar
        // FIX: the previous local was named "agentJarNotFound" even though findAgentJar()
        // returns true on success, which made the condition below read inverted.
        final boolean agentJarFound = this.findAgentJar();
        if (!agentJarFound) {
            logger.warn("pinpoint-bootstrap-x.x.x(-SNAPSHOT).jar not found.");
            return false;
        }

        // 2nd find pinpoint-commons.jar
        final String pinpointCommonsJar = getPinpointCommonsJar();
        if (pinpointCommonsJar == null) {
            logger.warn("pinpoint-commons-x.x.x(-SNAPSHOT).jar not found");
            return false;
        }
        final JarFile pinpointCommonsJarFile = getJarFile(pinpointCommonsJar);
        if (pinpointCommonsJarFile == null) {
            logger.warn("pinpoint-commons-x.x.x(-SNAPSHOT).jar not found");
            return false;
        }
        bootstrapJarFile.append(pinpointCommonsJarFile);

        // 3rd find bootstrap-core.jar
        final String bootStrapCoreJar = getBootStrapCoreJar();
        if (bootStrapCoreJar == null) {
            logger.warn("pinpoint-bootstrap-core-x.x.x(-SNAPSHOT).jar not found");
            return false;
        }
        JarFile bootStrapCoreJarFile = getJarFile(bootStrapCoreJar);
        if (bootStrapCoreJarFile == null) {
            logger.warn("pinpoint-bootstrap-core-x.x.x(-SNAPSHOT).jar not found");
            return false;
        }
        bootstrapJarFile.append(bootStrapCoreJarFile);

        // 4th find bootstrap-core-optional.jar
        final String bootStrapCoreOptionalJar = getBootStrapCoreOptionalJar();
        if (bootStrapCoreOptionalJar == null) {
            logger.info("pinpoint-bootstrap-core-optional-x.x.x(-SNAPSHOT).jar not found");
        } else {
            JarFile bootStrapCoreOptionalJarFile = getJarFile(bootStrapCoreOptionalJar);
            if (bootStrapCoreOptionalJarFile == null) {
                logger.info("pinpoint-bootstrap-core-optional-x.x.x(-SNAPSHOT).jar not found");
            } else {
                bootstrapJarFile.append(bootStrapCoreOptionalJarFile);
            }
        }

        // 5th find annotations.jar : optional dependency
        final String annotationsJar = getAnnotationsJar();
        if (annotationsJar == null) {
            logger.info("pinpoint-annotations-x.x.x(-SNAPSHOT).jar not found");
        } else {
            JarFile jarFile = getJarFile(annotationsJar);
            if (jarFile == null) {
                // BUGFIX: previously a null JarFile was appended unconditionally when the
                // jar path existed but the jar could not be opened (getJarFile returns
                // null on IOException). Guard like the other optional jar above.
                logger.info("pinpoint-annotations-x.x.x(-SNAPSHOT).jar not found");
            } else {
                bootstrapJarFile.append(jarFile);
            }
        }

        this.bootstrapJarFile = bootstrapJarFile;
        return true;
    }

    /** Re-reads the class path from the {@code java.class.path} system property. */
    public void setClassPathFromSystemProperty() {
        this.classPath = getClassPathFromSystemProperty();
    }

    /** Jars collected by the last successful {@link #verify()}; null before verification. */
    @Override
    public BootstrapJarFile getBootstrapJarFile() {
        return bootstrapJarFile;
    }

    public static String getClassPathFromSystemProperty() {
        return System.getProperty("java.class.path");
    }

    /**
     * Locates the bootstrap jar on the class path and derives the agent directory from it,
     * then resolves the sibling boot jars from {@code <agentDir>/boot}.
     *
     * @return true when the bootstrap jar and agent directory were resolved
     */
    boolean findAgentJar() {
        Matcher matcher = agentPattern.matcher(classPath);
        if (!matcher.find()) {
            return false;
        }
        this.agentJarName = parseAgentJar(matcher);
        this.agentJarFullPath = parseAgentJarPath(classPath, agentJarName);
        if (agentJarFullPath == null) {
            return false;
        }
        this.agentDirPath = parseAgentDirPath(agentJarFullPath);
        if (agentDirPath == null) {
            return false;
        }
        logger.info("Agent original-path:" + agentDirPath);
        // defense alias change
        this.agentDirPath = toCanonicalPath(agentDirPath);
        logger.info("Agent canonical-path:" + agentDirPath);

        this.pinpointCommonsJar = findFromBootDir("pinpoint-commons.jar", agentCommonsPattern);
        this.bootStrapCoreJar = findFromBootDir("pinpoint-bootstrap-core.jar", agentCorePattern);
        this.bootStrapCoreOptionalJar = findFromBootDir("pinpoint-bootstrap-core-optional.jar", agentCoreOptionalPattern);
        this.annotationsJar = findFromBootDir("pinpoint-annotations.jar", annotationsPattern);
        return true;
    }

    private String toCanonicalPath(String path) {
        final File file = new File(path);
        return toCanonicalPath(file);
    }

    /** Canonicalizes the path; falls back to the absolute path on I/O error. */
    private String toCanonicalPath(File file) {
        try {
            return file.getCanonicalPath();
        } catch (IOException e) {
            logger.warn(file.getPath() + " getCanonicalPath() error. Error:" + e.getMessage(), e);
            return file.getAbsolutePath();
        }
    }

    /**
     * Finds exactly one file matching {@code pattern} in {@code <agentDir>/boot}.
     *
     * @return canonical path of the unique match, or null when none or several match
     */
    private String findFromBootDir(final String name, final Pattern pattern) {
        String bootDirPath = agentDirPath + File.separator + "boot";
        final File[] files = listFiles(name, pattern, bootDirPath);
        if (isEmpty(files)) {
            logger.info(name + " not found.");
            return null;
        } else if (files.length == 1) {
            File file = files[0];
            return toCanonicalPath(file);
        } else {
            logger.info("too many " + name + " found. " + Arrays.toString(files));
            return null;
        }
    }

    private boolean isEmpty(File[] files) {
        return files == null || files.length == 0;
    }

    /** Lists files in {@code bootDirPath} whose names fully match {@code pattern}. */
    private File[] listFiles(final String name, final Pattern pattern, String bootDirPath) {
        File bootDir = new File(bootDirPath);
        return bootDir.listFiles(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String fileName) {
                Matcher matcher = pattern.matcher(fileName);
                if (matcher.matches()) {
                    logger.info("found " + name + ". " + dir.getAbsolutePath() + File.separator + fileName);
                    return true;
                }
                return false;
            }
        });
    }

    @Override
    public String getPinpointCommonsJar() {
        return pinpointCommonsJar;
    }

    @Override
    public String getBootStrapCoreJar() {
        return bootStrapCoreJar;
    }

    @Override
    public String getBootStrapCoreOptionalJar() {
        return bootStrapCoreOptionalJar;
    }

    public String getAnnotationsJar() {
        return annotationsJar;
    }

    /** Extracts the matched jar file name from the class path match. */
    private String parseAgentJar(Matcher matcher) {
        int start = matcher.start();
        int end = matcher.end();
        return this.classPath.substring(start, end);
    }

    @Override
    public String getAgentJarName() {
        return this.agentJarName;
    }

    /** Finds the class path entry that contains {@code agentJar}; null when absent. */
    private String parseAgentJarPath(String classPath, String agentJar) {
        String[] classPathList = classPath.split(File.pathSeparator);
        for (String findPath : classPathList) {
            boolean find = findPath.contains(agentJar);
            if (find) {
                return findPath;
            }
        }
        return null;
    }

    @Override
    public String getAgentJarFullPath() {
        return agentJarFullPath;
    }

    @Override
    public String getAgentLibPath() {
        return this.agentDirPath + File.separator + "lib";
    }

    @Override
    public String getAgentLogFilePath() {
        return this.agentDirPath + File.separator + "log";
    }

    @Override
    public String getAgentPluginPath() {
        return this.agentDirPath + File.separator + "plugin";
    }

    /**
     * Collects URLs for the agent lib directory contents plus the boot jars.
     * Assumes {@link #findAgentJar()} succeeded; commons/core jar paths must be non-null here.
     *
     * @return URLs to load onto the agent class loader; empty when lib/ is missing
     */
    @Override
    public List<URL> resolveLib() {
        String agentLibPath = getAgentLibPath();
        File libDir = new File(agentLibPath);
        if (!libDir.exists()) {
            logger.warn(agentLibPath + " not found");
            return Collections.emptyList();
        }
        if (!libDir.isDirectory()) {
            logger.warn(agentLibPath + " not Directory");
            return Collections.emptyList();
        }
        final List<URL> jarURLList = new ArrayList<URL>();
        final File[] findJarList = findJar(libDir);
        if (findJarList != null) {
            for (File file : findJarList) {
                URL url = toURI(file);
                if (url != null) {
                    jarURLList.add(url);
                }
            }
        }
        URL agentDirUri = toURI(new File(agentLibPath));
        if (agentDirUri != null) {
            jarURLList.add(agentDirUri);
        }
        // hot fix. boot jars not found from classPool ??
        jarURLList.add(toURI(new File(getPinpointCommonsJar())));
        jarURLList.add(toURI(new File(getBootStrapCoreJar())));
        String bootstrapCoreOptionalJar = getBootStrapCoreOptionalJar();
        // bootstrap-core-optional jar is not required and is okay to be null
        if (bootstrapCoreOptionalJar != null) {
            jarURLList.add(toURI(new File(bootstrapCoreOptionalJar)));
        }
        return jarURLList;
    }

    /**
     * Lists every *.jar directly under the agent plugin directory.
     *
     * @return plugin jar URLs; empty array when the directory is missing or not a directory
     */
    @Override
    public URL[] resolvePlugins() {
        final File file = new File(getAgentPluginPath());
        if (!file.exists()) {
            logger.warn(file + " not found");
            return new URL[0];
        }
        if (!file.isDirectory()) {
            logger.warn(file + " is not a directory");
            return new URL[0];
        }
        final File[] jars = file.listFiles(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                return name.endsWith(".jar");
            }
        });
        if (isEmpty(jars)) {
            return new URL[0];
        }
        final URL[] urls = new URL[jars.length];
        for (int i = 0; i < jars.length; i++) {
            try {
                urls[i] = jars[i].toURI().toURL();
            } catch (MalformedURLException e) {
                // TODO have to change to PinpointException AFTER moving the exception to pinpoint-common
                throw new RuntimeException("Fail to load plugin jars", e);
            }
        }
        for (File pluginJar : jars) {
            logger.info("Found plugins: " + pluginJar.getPath());
        }
        return urls;
    }

    /** File-to-URL conversion; returns null (and logs) on malformed URL. */
    private URL toURI(File file) {
        URI uri = file.toURI();
        try {
            return uri.toURL();
        } catch (MalformedURLException e) {
            logger.warn(file.getName() + ".toURL() failed.", e);
            return null;
        }
    }

    /** Lists files in {@code libDir} whose names end with one of the accepted extensions. */
    private File[] findJar(File libDir) {
        return libDir.listFiles(new FileFilter() {
            @Override
            public boolean accept(File pathname) {
                String path = pathname.getName();
                for (String extension : fileExtensionList) {
                    if (path.lastIndexOf("." + extension) != -1) {
                        return true;
                    }
                }
                return false;
            }
        });
    }

    /** Parent directory of the jar path; handles both '/' and '\' separators. */
    private String parseAgentDirPath(String agentJarFullPath) {
        int index1 = agentJarFullPath.lastIndexOf("/");
        int index2 = agentJarFullPath.lastIndexOf("\\");
        int max = Math.max(index1, index2);
        if (max == -1) {
            return null;
        }
        return agentJarFullPath.substring(0, max);
    }

    @Override
    public String getAgentDirPath() {
        return agentDirPath;
    }

    @Override
    public String getAgentConfigPath() {
        return agentDirPath + File.separator + "pinpoint.config";
    }

    /** Opens the jar at {@code jarFilePath}; returns null (and logs) when it cannot be opened. */
    private JarFile getJarFile(String jarFilePath) {
        try {
            return new JarFile(jarFilePath);
        } catch (IOException ioe) {
            logger.warn(jarFilePath + " file not found. Error:" + ioe.getMessage(), ioe);
            return null;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.data.conversion;

import org.apache.flink.annotation.Internal;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.data.ArrayData;
import org.apache.flink.table.data.DecimalData;
import org.apache.flink.table.data.MapData;
import org.apache.flink.table.data.RawValueData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.LogicalTypeRoot;
import org.apache.flink.types.Row;

import java.math.BigDecimal;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Supplier;

/**
 * Registry of available data structure converters.
 *
 * <p>Data structure converters are used at the edges for the API for converting between internal
 * structures (see {@link RowData}) and external structures (see {@link
 * DataType#getConversionClass()}).
 *
 * <p>This is useful for UDFs, sources, sinks, or exposing data in the API (e.g. via a {@code
 * collect()}).
 *
 * <p>Note: It is NOT the responsibility of a converter to normalize the data. Thus, a converter
 * does neither change the precision of a timestamp nor prune/expand strings to their defined
 * length. This might be the responsibility of data classes that are called transitively.
 */
@Internal
public final class DataStructureConverters {

    // Lookup table keyed by (logical type root, external conversion class). Populated once in
    // the static initializer below; never mutated afterwards (no synchronization needed for the
    // read-only lookups in getConverterInternal).
    private static final Map<ConverterIdentifier<?>, DataStructureConverterFactory> converters =
            new HashMap<>();

    static {
        // ordered by type root and conversion class definition

        // character strings
        putConverter(LogicalTypeRoot.CHAR, String.class, constructor(StringStringConverter::new));
        putConverter(
                LogicalTypeRoot.CHAR, byte[].class, constructor(StringByteArrayConverter::new));
        putConverter(LogicalTypeRoot.CHAR, StringData.class, identity());
        putConverter(
                LogicalTypeRoot.VARCHAR, String.class, constructor(StringStringConverter::new));
        putConverter(
                LogicalTypeRoot.VARCHAR, byte[].class, constructor(StringByteArrayConverter::new));
        putConverter(LogicalTypeRoot.VARCHAR, StringData.class, identity());

        // booleans and binary strings
        putConverter(LogicalTypeRoot.BOOLEAN, Boolean.class, identity());
        putConverter(LogicalTypeRoot.BOOLEAN, boolean.class, identity());
        putConverter(LogicalTypeRoot.BINARY, byte[].class, identity());
        putConverter(LogicalTypeRoot.VARBINARY, byte[].class, identity());

        // exact and approximate numerics
        putConverter(LogicalTypeRoot.DECIMAL, BigDecimal.class, DecimalBigDecimalConverter::create);
        putConverter(LogicalTypeRoot.DECIMAL, DecimalData.class, identity());
        putConverter(LogicalTypeRoot.TINYINT, Byte.class, identity());
        putConverter(LogicalTypeRoot.TINYINT, byte.class, identity());
        putConverter(LogicalTypeRoot.SMALLINT, Short.class, identity());
        putConverter(LogicalTypeRoot.SMALLINT, short.class, identity());
        putConverter(LogicalTypeRoot.INTEGER, Integer.class, identity());
        putConverter(LogicalTypeRoot.INTEGER, int.class, identity());
        putConverter(LogicalTypeRoot.BIGINT, Long.class, identity());
        putConverter(LogicalTypeRoot.BIGINT, long.class, identity());
        putConverter(LogicalTypeRoot.FLOAT, Float.class, identity());
        putConverter(LogicalTypeRoot.FLOAT, float.class, identity());
        putConverter(LogicalTypeRoot.DOUBLE, Double.class, identity());
        putConverter(LogicalTypeRoot.DOUBLE, double.class, identity());

        // date and time
        putConverter(
                LogicalTypeRoot.DATE, java.sql.Date.class, constructor(DateDateConverter::new));
        putConverter(
                LogicalTypeRoot.DATE,
                java.time.LocalDate.class,
                constructor(DateLocalDateConverter::new));
        putConverter(LogicalTypeRoot.DATE, Integer.class, identity());
        putConverter(LogicalTypeRoot.DATE, int.class, identity());
        putConverter(
                LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE,
                java.sql.Time.class,
                constructor(TimeTimeConverter::new));
        putConverter(
                LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE,
                java.time.LocalTime.class,
                constructor(TimeLocalTimeConverter::new));
        putConverter(LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE, Integer.class, identity());
        putConverter(LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE, int.class, identity());
        putConverter(
                LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE,
                Long.class,
                constructor(TimeLongConverter::new));
        putConverter(
                LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE,
                long.class,
                constructor(TimeLongConverter::new));

        // timestamps
        putConverter(
                LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE,
                java.sql.Timestamp.class,
                constructor(TimestampTimestampConverter::new));
        putConverter(
                LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE,
                java.time.LocalDateTime.class,
                constructor(TimestampLocalDateTimeConverter::new));
        putConverter(LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE, TimestampData.class, identity());
        // TIMESTAMP WITH TIME ZONE has no runtime representation yet; fail eagerly on access
        putConverter(
                LogicalTypeRoot.TIMESTAMP_WITH_TIME_ZONE,
                java.time.ZonedDateTime.class,
                unsupported());
        putConverter(
                LogicalTypeRoot.TIMESTAMP_WITH_TIME_ZONE,
                java.time.OffsetDateTime.class,
                unsupported());
        putConverter(
                LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE,
                java.time.Instant.class,
                constructor(LocalZonedTimestampInstantConverter::new));
        putConverter(
                LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE,
                Integer.class,
                constructor(LocalZonedTimestampIntConverter::new));
        putConverter(
                LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE,
                int.class,
                constructor(LocalZonedTimestampIntConverter::new));
        putConverter(
                LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE,
                Long.class,
                constructor(LocalZonedTimestampLongConverter::new));
        putConverter(
                LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE,
                long.class,
                constructor(LocalZonedTimestampLongConverter::new));
        putConverter(
                LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE,
                java.sql.Timestamp.class,
                constructor(LocalZonedTimestampTimestampConverter::new));
        putConverter(
                LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE, TimestampData.class, identity());

        // intervals
        putConverter(
                LogicalTypeRoot.INTERVAL_YEAR_MONTH,
                java.time.Period.class,
                YearMonthIntervalPeriodConverter::create);
        putConverter(LogicalTypeRoot.INTERVAL_YEAR_MONTH, Integer.class, identity());
        putConverter(LogicalTypeRoot.INTERVAL_YEAR_MONTH, int.class, identity());
        putConverter(
                LogicalTypeRoot.INTERVAL_DAY_TIME,
                java.time.Duration.class,
                constructor(DayTimeIntervalDurationConverter::new));
        putConverter(LogicalTypeRoot.INTERVAL_DAY_TIME, Long.class, identity());
        putConverter(LogicalTypeRoot.INTERVAL_DAY_TIME, long.class, identity());

        // arrays (primitive array classes; List subclasses and Object[] are handled in
        // getConverterInternal as special cases)
        putConverter(LogicalTypeRoot.ARRAY, ArrayData.class, identity());
        putConverter(
                LogicalTypeRoot.ARRAY,
                boolean[].class,
                constructor(ArrayBooleanArrayConverter::new));
        putConverter(
                LogicalTypeRoot.ARRAY, byte[].class, constructor(ArrayByteArrayConverter::new));
        putConverter(
                LogicalTypeRoot.ARRAY, short[].class, constructor(ArrayShortArrayConverter::new));
        putConverter(LogicalTypeRoot.ARRAY, int[].class, constructor(ArrayIntArrayConverter::new));
        putConverter(
                LogicalTypeRoot.ARRAY, long[].class, constructor(ArrayLongArrayConverter::new));
        putConverter(
                LogicalTypeRoot.ARRAY, float[].class, constructor(ArrayFloatArrayConverter::new));
        putConverter(
                LogicalTypeRoot.ARRAY, double[].class, constructor(ArrayDoubleArrayConverter::new));

        // multisets, maps, rows, raw
        putConverter(LogicalTypeRoot.MULTISET, MapData.class, identity());
        putConverter(LogicalTypeRoot.MAP, MapData.class, identity());
        putConverter(LogicalTypeRoot.ROW, Row.class, RowRowConverter::create);
        putConverter(LogicalTypeRoot.ROW, RowData.class, identity());
        putConverter(LogicalTypeRoot.STRUCTURED_TYPE, Row.class, RowRowConverter::create);
        putConverter(LogicalTypeRoot.STRUCTURED_TYPE, RowData.class, identity());
        putConverter(LogicalTypeRoot.RAW, byte[].class, RawByteArrayConverter::create);
        putConverter(LogicalTypeRoot.RAW, RawValueData.class, identity());
    }

    /** Returns a converter for the given {@link DataType}. */
    @SuppressWarnings("unchecked")
    public static DataStructureConverter<Object, Object> getConverter(DataType dataType) {
        // cast to Object for ease of use
        return (DataStructureConverter<Object, Object>) getConverterInternal(dataType);
    }

    /**
     * Looks up a converter in the registry by (type root, conversion class), falling back to
     * type-root-specific special cases for collection/structured/raw types.
     *
     * @throws TableException when no converter exists for the data type
     */
    private static DataStructureConverter<?, ?> getConverterInternal(DataType dataType) {
        final LogicalType logicalType = dataType.getLogicalType();
        final DataStructureConverterFactory factory =
                converters.get(
                        new ConverterIdentifier<>(
                                logicalType.getTypeRoot(), dataType.getConversionClass()));
        if (factory != null) {
            return factory.createConverter(dataType);
        }
        // special cases
        switch (logicalType.getTypeRoot()) {
            case ARRAY:
                // for subclasses of List
                if (List.class.isAssignableFrom(dataType.getConversionClass())) {
                    return ArrayListConverter.create(dataType);
                }
                // for non-primitive arrays
                return ArrayObjectArrayConverter.create(dataType);
            case MULTISET:
                // for subclasses of Map
                return MapMapConverter.createForMultisetType(dataType);
            case MAP:
                // for subclasses of Map
                return MapMapConverter.createForMapType(dataType);
            case DISTINCT_TYPE:
                // a distinct type converts exactly like its single source type
                return getConverterInternal(dataType.getChildren().get(0));
            case STRUCTURED_TYPE:
                return StructuredObjectConverter.create(dataType);
            case RAW:
                return RawObjectConverter.create(dataType);
            default:
                throw new TableException("Could not find converter for data type: " + dataType);
        }
    }

    // --------------------------------------------------------------------------------------------
    // Helper methods
    // --------------------------------------------------------------------------------------------

    /** Registers one (type root, conversion class) -> factory entry in the lookup table. */
    private static <E> void putConverter(
            LogicalTypeRoot root, Class<E> conversionClass, DataStructureConverterFactory factory) {
        converters.put(new ConverterIdentifier<>(root, conversionClass), factory);
    }

    /** Factory for types whose internal and external representations coincide. */
    private static DataStructureConverterFactory identity() {
        return constructor(IdentityConverter::new);
    }

    /** Adapts a no-arg converter constructor into a factory that ignores the data type. */
    private static DataStructureConverterFactory constructor(
            Supplier<DataStructureConverter<?, ?>> supplier) {
        return dataType -> supplier.get();
    }

    /** Factory that defers failure until a converter is actually requested. */
    private static DataStructureConverterFactory unsupported() {
        return dataType -> {
            throw new TableException("Unsupported data type: " + dataType);
        };
    }

    // --------------------------------------------------------------------------------------------
    // Helper classes
    // --------------------------------------------------------------------------------------------

    /** Map key: pairs a logical type root with the requested external conversion class. */
    private static class ConverterIdentifier<E> {

        final LogicalTypeRoot root;

        final Class<E> conversionClass;

        ConverterIdentifier(LogicalTypeRoot root, Class<E> conversionClass) {
            this.root = root;
            this.conversionClass = conversionClass;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            ConverterIdentifier<?> that = (ConverterIdentifier<?>) o;
            return root == that.root && conversionClass.equals(that.conversionClass);
        }

        @Override
        public int hashCode() {
            return Objects.hash(root, conversionClass);
        }
    }

    /** Creates a converter for a concrete {@link DataType} (may inspect precision, children, …). */
    private interface DataStructureConverterFactory {
        DataStructureConverter<?, ?> createConverter(DataType dt);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.security;

import static org.apache.hadoop.hbase.ipc.TestProtobufRpcServiceImpl.SERVICE;
import static org.apache.hadoop.hbase.ipc.TestProtobufRpcServiceImpl.newBlockingStub;
import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.getKeytabFileForTesting;
import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.getPrincipalForTesting;
import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.getSecuredConfiguration;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.fail;

import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import javax.security.sasl.SaslException;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ipc.BlockingRpcClient;
import org.apache.hadoop.hbase.ipc.FifoRpcScheduler;
import org.apache.hadoop.hbase.ipc.NettyRpcClient;
import org.apache.hadoop.hbase.ipc.NettyRpcServer;
import org.apache.hadoop.hbase.ipc.RpcClient;
import org.apache.hadoop.hbase.ipc.RpcClientFactory;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.ipc.RpcServerFactory;
import org.apache.hadoop.hbase.ipc.RpcServerInterface;
import org.apache.hadoop.hbase.ipc.SimpleRpcServer;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import org.mockito.Mockito;

import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hbase.thirdparty.com.google.protobuf.BlockingService;

import org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos;
import org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface;

/**
 * Tests secure (Kerberos/SASL) RPC between an HBase RPC client and server against a MiniKdc,
 * parameterized over every (client impl, server impl) combination.
 */
@RunWith(Parameterized.class)
@Category({ SecurityTests.class, MediumTests.class })
public class TestSecureIPC {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestSecureIPC.class);

  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  private static final File KEYTAB_FILE = new File(
      TEST_UTIL.getDataTestDir("keytab").toUri().getPath());

  private static MiniKdc KDC;
  // FIX: HOST never changes — made final.
  private static final String HOST = "localhost";
  private static String PRINCIPAL;

  String krbKeytab;
  String krbPrincipal;
  UserGroupInformation ugi;
  Configuration clientConf;
  Configuration serverConf;

  @Rule
  public ExpectedException exception = ExpectedException.none();

  /** One parameter set per (rpc client impl, rpc server impl) cross product entry. */
  @Parameters(name = "{index}: rpcClientImpl={0}, rpcServerImpl={1}")
  public static Collection<Object[]> parameters() {
    List<Object[]> params = new ArrayList<>();
    List<String> rpcClientImpls = Arrays.asList(
        BlockingRpcClient.class.getName(), NettyRpcClient.class.getName());
    List<String> rpcServerImpls = Arrays.asList(
        SimpleRpcServer.class.getName(), NettyRpcServer.class.getName());
    for (String rpcClientImpl : rpcClientImpls) {
      for (String rpcServerImpl : rpcServerImpls) {
        params.add(new Object[] { rpcClientImpl, rpcServerImpl });
      }
    }
    return params;
  }

  @Parameter(0)
  public String rpcClientImpl;

  @Parameter(1)
  public String rpcServerImpl;

  @BeforeClass
  public static void setUp() throws Exception {
    KDC = TEST_UTIL.setupMiniKdc(KEYTAB_FILE);
    PRINCIPAL = "hbase/" + HOST;
    KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL);
    HBaseKerberosUtils.setPrincipalForTesting(PRINCIPAL + "@" + KDC.getRealm());
  }

  @AfterClass
  public static void tearDown() throws IOException {
    if (KDC != null) {
      KDC.stop();
    }
    TEST_UTIL.cleanupTestDir();
  }

  /** Logs in via the test keytab and wires the parameterized client/server impls into conf. */
  @Before
  public void setUpTest() throws Exception {
    krbKeytab = getKeytabFileForTesting();
    krbPrincipal = getPrincipalForTesting();
    ugi = loginKerberosPrincipal(krbKeytab, krbPrincipal);
    clientConf = getSecuredConfiguration();
    clientConf.set(RpcClientFactory.CUSTOM_RPC_CLIENT_IMPL_CONF_KEY, rpcClientImpl);
    serverConf = getSecuredConfiguration();
    serverConf.set(RpcServerFactory.CUSTOM_RPC_SERVER_IMPL_CONF_KEY, rpcServerImpl);
  }

  @Test
  public void testRpcCallWithEnabledKerberosSaslAuth() throws Exception {
    UserGroupInformation ugi2 = UserGroupInformation.getCurrentUser();

    // check that the login user is okay:
    assertSame(ugi2, ugi);
    assertEquals(AuthenticationMethod.KERBEROS, ugi.getAuthenticationMethod());
    assertEquals(krbPrincipal, ugi.getUserName());

    callRpcService(User.create(ugi2));
  }

  @Test
  public void testRpcFallbackToSimpleAuth() throws Exception {
    String clientUsername = "testuser";
    UserGroupInformation clientUgi = UserGroupInformation.createUserForTesting(clientUsername,
        new String[] { clientUsername });

    // check that the client user is insecure
    assertNotSame(ugi, clientUgi);
    assertEquals(AuthenticationMethod.SIMPLE, clientUgi.getAuthenticationMethod());
    assertEquals(clientUsername, clientUgi.getUserName());

    clientConf.set(User.HBASE_SECURITY_CONF_KEY, "simple");
    serverConf.setBoolean(RpcServer.FALLBACK_TO_INSECURE_CLIENT_AUTH, true);
    callRpcService(User.create(clientUgi));
  }

  void setRpcProtection(String clientProtection, String serverProtection) {
    clientConf.set("hbase.rpc.protection", clientProtection);
    serverConf.set("hbase.rpc.protection", serverProtection);
  }

  /**
   * Test various combinations of Server and Client qops.
   * @throws Exception on any RPC or negotiation failure
   */
  @Test
  public void testSaslWithCommonQop() throws Exception {
    setRpcProtection("privacy,authentication", "authentication");
    callRpcService(User.create(ugi));

    setRpcProtection("authentication", "privacy,authentication");
    callRpcService(User.create(ugi));

    setRpcProtection("integrity,authentication", "privacy,authentication");
    callRpcService(User.create(ugi));

    setRpcProtection("integrity,authentication", "integrity,authentication");
    callRpcService(User.create(ugi));

    setRpcProtection("privacy,authentication", "privacy,authentication");
    callRpcService(User.create(ugi));
  }

  @Test
  public void testSaslNoCommonQop() throws Exception {
    exception.expect(SaslException.class);
    exception.expectMessage("No common protection layer between client and server");
    setRpcProtection("integrity", "privacy");
    callRpcService(User.create(ugi));
  }

  /**
   * Test sasl encryption with Crypto AES.
   * @throws Exception on any RPC or negotiation failure
   */
  @Test
  public void testSaslWithCryptoAES() throws Exception {
    setRpcProtection("privacy", "privacy");
    setCryptoAES("true", "true");
    callRpcService(User.create(ugi));
  }

  /**
   * Test various combinations of Server and Client configuration for Crypto AES.
   * @throws Exception on any unexpected RPC failure
   */
  @Test
  public void testDifferentConfWithCryptoAES() throws Exception {
    setRpcProtection("privacy", "privacy");

    setCryptoAES("false", "true");
    callRpcService(User.create(ugi));

    setCryptoAES("true", "false");
    try {
      callRpcService(User.create(ugi));
      fail("The exception should be thrown out for the rpc timeout.");
    } catch (Exception e) {
      // ignore the expected exception
    }
  }

  void setCryptoAES(String clientCryptoAES, String serverCryptoAES) {
    clientConf.set("hbase.rpc.crypto.encryption.aes.enabled", clientCryptoAES);
    serverConf.set("hbase.rpc.crypto.encryption.aes.enabled", serverCryptoAES);
  }

  /** Performs a fresh Kerberos keytab login and returns the resulting login user. */
  private UserGroupInformation loginKerberosPrincipal(String krbKeytab, String krbPrincipal)
      throws Exception {
    Configuration cnf = new Configuration();
    cnf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(cnf);
    UserGroupInformation.loginUserFromKeytab(krbPrincipal, krbKeytab);
    return UserGroupInformation.getLoginUser();
  }

  /**
   * Sets up a RPC Server and a Client. Does a RPC checks the result. If an exception is thrown from
   * the stub, this function will throw root cause of that exception.
   */
  private void callRpcService(User clientUser) throws Exception {
    SecurityInfo securityInfoMock = Mockito.mock(SecurityInfo.class);
    Mockito.when(securityInfoMock.getServerPrincipal())
        .thenReturn(HBaseKerberosUtils.KRB_PRINCIPAL);
    SecurityInfo.addInfo("TestProtobufRpcProto", securityInfoMock);

    InetSocketAddress isa = new InetSocketAddress(HOST, 0);

    RpcServerInterface rpcServer = RpcServerFactory.createRpcServer(null, "AbstractTestSecureIPC",
        Lists.newArrayList(new RpcServer.BlockingServiceAndInterface((BlockingService) SERVICE,
            null)), isa, serverConf, new FifoRpcScheduler(serverConf, 1));
    rpcServer.start();
    try (RpcClient rpcClient = RpcClientFactory.createClient(clientConf,
        HConstants.DEFAULT_CLUSTER_ID.toString())) {
      BlockingInterface stub = newBlockingStub(rpcClient, rpcServer.getListenerAddress(),
        clientUser);
      TestThread th1 = new TestThread(stub);
      // FIX: renamed the holder — the old local `exception[]` shadowed the @Rule field of the
      // same name. Also removed a dead `Collections.synchronizedList(new ArrayList<>())`
      // statement whose result was discarded (leftover from an earlier implementation).
      final Throwable[] exceptionHolder = new Throwable[1];
      Thread.UncaughtExceptionHandler exceptionHandler = new Thread.UncaughtExceptionHandler() {
        @Override
        public void uncaughtException(Thread th, Throwable ex) {
          exceptionHolder[0] = ex;
        }
      };
      th1.setUncaughtExceptionHandler(exceptionHandler);
      th1.start();
      th1.join();
      if (exceptionHolder[0] != null) {
        // throw root cause.
        Throwable cause = exceptionHolder[0];
        while (cause.getCause() != null) {
          cause = cause.getCause();
        }
        throw (Exception) cause;
      }
    } finally {
      rpcServer.stop();
    }
  }

  /** Worker that echoes random payloads of increasing size and asserts the round trip. */
  public static class TestThread extends Thread {
    private final BlockingInterface stub;

    public TestThread(BlockingInterface stub) {
      this.stub = stub;
    }

    @Override
    public void run() {
      try {
        int[] messageSize = new int[] { 100, 1000, 10000 };
        for (int i = 0; i < messageSize.length; i++) {
          String input = RandomStringUtils.random(messageSize[i]);
          String result = stub
              .echo(null, TestProtos.EchoRequestProto.newBuilder().setMessage(input).build())
              .getMessage();
          assertEquals(input, result);
        }
      } catch (org.apache.hbase.thirdparty.com.google.protobuf.ServiceException e) {
        throw new RuntimeException(e);
      }
    }
  }
}
// -*- mode: java; c-basic-offset: 2; -*- // Copyright 2009-2011 Google, All Rights reserved // Copyright 2011-2012 MIT, All rights reserved // Released under the Apache License, Version 2.0 // http://www.apache.org/licenses/LICENSE-2.0 package com.google.appinventor.components.runtime; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.io.File; import twitter4j.DirectMessage; import twitter4j.IDs; import twitter4j.Query; import twitter4j.Status; import twitter4j.StatusUpdate; import twitter4j.TwitterException; import twitter4j.TwitterFactory; import twitter4j.User; import twitter4j.MediaEntity; import twitter4j.auth.AccessToken; import twitter4j.auth.RequestToken; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.net.Uri; import android.os.Handler; import android.util.Log; import com.google.appinventor.components.annotations.DesignerComponent; import com.google.appinventor.components.annotations.DesignerProperty; import com.google.appinventor.components.annotations.PropertyCategory; import com.google.appinventor.components.annotations.SimpleEvent; import com.google.appinventor.components.annotations.SimpleFunction; import com.google.appinventor.components.annotations.SimpleObject; import com.google.appinventor.components.annotations.SimpleProperty; import com.google.appinventor.components.annotations.UsesLibraries; import com.google.appinventor.components.annotations.UsesPermissions; import com.google.appinventor.components.common.ComponentCategory; import com.google.appinventor.components.common.PropertyTypeConstants; import com.google.appinventor.components.common.YaVersion; import com.google.appinventor.components.runtime.util.AsynchUtil; import com.google.appinventor.components.runtime.util.ErrorMessages; /** * Component for accessing Twitter. 
* * @author sharon@google.com (Sharon Perl) - added OAuth support * @author ajcolter@gmail.com (Aubrey Colter) - added the twitter4j 2.2.6 jars * @author josmasflores@gmail.com (Jose Dominguez) - added the twitter4j 3.0.3 jars and fixed auth bug 2413 * @author edwinhzhang@gmail.com (Edwin Zhang) - added twitter4j-media-support-3.03 jar, status + image upload */ @DesignerComponent(version = YaVersion.TWITTER_COMPONENT_VERSION, description = "A non-visible component that enables communication " + "with <a href=\"http://www.twitter.com\" target=\"_blank\">Twitter</a>. " + "Once a user has logged into their Twitter account (and the authorization has been confirmed successful by the " + "<code>IsAuthorized</code> event), many more operations are available:<ul>" + "<li> Searching Twitter for tweets or labels (<code>SearchTwitter</code>)</li>\n" + "<li> Sending a Tweet (<code>Tweet</code>)" + " </li>\n" + "<li> Sending a Tweet with an Image (<code>TweetWithImage</code>)" + " </li>\n" + "<li> Directing a message to a specific user " + " (<code>DirectMessage</code>)</li>\n " + "<li> Receiving the most recent messages directed to the logged-in user " + " (<code>RequestDirectMessages</code>)</li>\n " + "<li> Following a specific user (<code>Follow</code>)</li>\n" + "<li> Ceasing to follow a specific user (<code>StopFollowing</code>)</li>\n" + "<li> Getting a list of users following the logged-in user " + " (<code>RequestFollowers</code>)</li>\n " + "<li> Getting the most recent messages of users followed by the " + " logged-in user (<code>RequestFriendTimeline</code>)</li>\n " + "<li> Getting the most recent mentions of the logged-in user " + " (<code>RequestMentions</code>)</li></ul></p>\n " + "<p>You must obtain a Consumer Key and Consumer Secret for Twitter authorization " + " specific to your app from http://twitter.com/oauth_clients/new", category = ComponentCategory.SOCIAL, nonVisible = true, iconName = "images/twitter.png") @SimpleObject 
@UsesPermissions(permissionNames = "android.permission.INTERNET")
@UsesLibraries(libraries = "twitter4j.jar," + "twitter4jmedia.jar")
public final class Twitter extends AndroidNonvisibleComponent implements
    ActivityResultListener, Component {
  private static final String ACCESS_TOKEN_TAG = "TwitterOauthAccessToken";
  private static final String ACCESS_SECRET_TAG = "TwitterOauthAccessSecret";
  // NOTE(review): referenced only inside @SimpleFunction description strings below;
  // no trimming is actually performed by Tweet/TweetWithImage/DirectMessage.
  private static final String MAX_CHARACTERS = "160";
  private static final String URL_HOST = "twitter";
  private static final String CALLBACK_URL = Form.APPINVENTOR_URL_SCHEME + "://" + URL_HOST;
  private static final String WEBVIEW_ACTIVITY_CLASS = WebViewActivity.class
      .getName();

  // the following fields should only be accessed from the UI thread
  private String consumerKey = "";
  private String consumerSecret = "";
  private String TwitPic_API_Key = "";
  private final List<String> mentions;
  private final List<String> followers;
  private final List<List<String>> timeline;
  private final List<String> directMessages;
  private final List<String> searchResults;

  // the following final fields are not synchronized -- twitter4j is thread
  // safe as of 2.2.6
  private twitter4j.Twitter twitter;
  private RequestToken requestToken;
  private AccessToken accessToken;
  private String userName = "";
  private final SharedPreferences sharedPreferences;
  private final int requestCode;
  private final ComponentContainer container;
  private final Handler handler;

  // Logging
  private final String TAG = "Twitter";

  // TODO(sharon): twitter4j apparently has an asynchronous interface
  // (AsynchTwitter).
  // We should consider whether it has any advantages over AsynchUtil.

  /**
   * The maximum number of mentions returned by the following methods:
   *
   * <table>
   * <tr>
   * <td>component</td>
   * <td>twitter4j library</td>
   * <td>twitter API</td>
   * </tr>
   * <tr>
   * <td>RequestMentions</td>
   * <td>getMentions</td>
   * <td>statuses/mentions</td>
   * </tr>
   * <tr>
   * <td>RequestDirectMessages</td>
   * <td>getDirectMessages</td>
   * <td>direct_messages</td>
   * </tr>
   * </table>
   */
  private static final String MAX_MENTIONS_RETURNED = "20";

  /**
   * Creates a new Twitter component: initializes the result lists, registers
   * for WebView activity results, and restores any previously saved OAuth
   * access token from SharedPreferences.
   *
   * @param container the container this component belongs to
   */
  public Twitter(ComponentContainer container) {
    super(container.$form());
    this.container = container;
    handler = new Handler();

    mentions = new ArrayList<String>();
    followers = new ArrayList<String>();
    timeline = new ArrayList<List<String>>();
    directMessages = new ArrayList<String>();
    searchResults = new ArrayList<String>();

    sharedPreferences = container.$context().getSharedPreferences("Twitter",
        Context.MODE_PRIVATE);
    accessToken = retrieveAccessToken();
    requestCode = form.registerForActivityResult(this);
  }

  /**
   * Logs in to Twitter with a username and password.
   * Kept only for backward compatibility; always dispatches an error.
   */
  @Deprecated
  @SimpleFunction(userVisible = false, description = "Twitter's API no longer supports login via username and "
      + "password. Use the Authorize call instead.")
  public void Login(String username, String password) {
    form.dispatchErrorOccurredEvent(this, "Login",
        ErrorMessages.ERROR_TWITTER_UNSUPPORTED_LOGIN_FUNCTION);
  }

  @SimpleProperty(category = PropertyCategory.BEHAVIOR, description = "The user name of the authorized user. Empty if "
      + "there is no authorized user.")
  public String Username() {
    return userName;
  }

  /**
   * ConsumerKey property getter method.
   */
  @SimpleProperty(category = PropertyCategory.BEHAVIOR)
  public String ConsumerKey() {
    return consumerKey;
  }

  /**
   * ConsumerKey property setter method: sets the consumer key to be used when
   * authorizing with Twitter via OAuth.
   *
   * @param consumerKey
   *          the key for use in Twitter OAuth
   */
  @DesignerProperty(editorType = PropertyTypeConstants.PROPERTY_TYPE_STRING, defaultValue = "")
  @SimpleProperty(category = PropertyCategory.BEHAVIOR, description = "The the consumer key to be used when authorizing with Twitter via OAuth.")
  public void ConsumerKey(String consumerKey) {
    this.consumerKey = consumerKey;
  }

  /**
   * ConsumerSecret property getter method.
   */
  @SimpleProperty(category = PropertyCategory.BEHAVIOR)
  public String ConsumerSecret() {
    return consumerSecret;
  }

  /**
   * ConsumerSecret property setter method: sets the consumer secret to be used
   * when authorizing with Twitter via OAuth.
   *
   * @param consumerSecret
   *          the secret for use in Twitter OAuth
   */
  @DesignerProperty(editorType = PropertyTypeConstants.PROPERTY_TYPE_STRING, defaultValue = "")
  @SimpleProperty(description="The consumer secret to be used when authorizing with Twitter via OAuth")
  public void ConsumerSecret(String consumerSecret) {
    this.consumerSecret = consumerSecret;
  }

  /**
   * TwitPicAPIkey property getter method.
   */
  @Deprecated
  @SimpleProperty(userVisible = false, category = PropertyCategory.BEHAVIOR)
  public String TwitPic_API_Key() {
    return TwitPic_API_Key;
  }

  /**
   * TwitPicAPIkey property setter method: sets the TwitPicAPIkey to be used
   * for image uploading with twitter.
   *
   * @param TwitPic_API_Key
   *          the API Key for image uploading, given by TwitPic
   */
  @Deprecated
  // Hide the deprecated property from the Designer
  //@DesignerProperty(editorType = PropertyTypeConstants.PROPERTY_TYPE_STRING, defaultValue = "")
  @SimpleProperty(userVisible = false, category = PropertyCategory.BEHAVIOR, description="The API Key for image uploading, provided by TwitPic.")
  public void TwitPic_API_Key(String TwitPic_API_Key) {
    this.TwitPic_API_Key = TwitPic_API_Key;
  }

  /**
   * Indicates when the login has been successful.
   */
  @SimpleEvent(description = "This event is raised after the program calls "
      + "<code>Authorize</code> if the authorization was successful. "
      + "It is also called after a call to <code>CheckAuthorized</code> "
      + "if we already have a valid access token. "
      + "After this event has been raised, any other method for this "
      + "component can be called.")
  public void IsAuthorized() {
    EventDispatcher.dispatchEvent(this, "IsAuthorized");
  }

  /**
   * Authenticate to Twitter using OAuth.
   * If a saved access token already works, fires IsAuthorized directly;
   * otherwise launches the WebView activity so the user can log in
   * (the result comes back through resultReturned below).
   */
  @SimpleFunction(description = "Redirects user to login to Twitter via the Web browser using "
      + "the OAuth protocol if we don't already have authorization.")
  public void Authorize() {
    if (consumerKey.length() == 0 || consumerSecret.length() == 0) {
      form.dispatchErrorOccurredEvent(this, "Authorize",
          ErrorMessages.ERROR_TWITTER_BLANK_CONSUMER_KEY_OR_SECRET);
      return;
    }
    if (twitter == null) {
      twitter = new TwitterFactory().getInstance();
    }
    final String myConsumerKey = consumerKey;
    final String myConsumerSecret = consumerSecret;
    AsynchUtil.runAsynchronously(new Runnable() {
      public void run() {
        if (checkAccessToken(myConsumerKey, myConsumerSecret)) {
          handler.post(new Runnable() {
            @Override
            public void run() {
              IsAuthorized();
            }
          });
          return;
        }
        try {
          // potentially time-consuming calls
          RequestToken newRequestToken;
          twitter.setOAuthConsumer(myConsumerKey, myConsumerSecret);
          newRequestToken = twitter.getOAuthRequestToken(CALLBACK_URL);
          String authURL = newRequestToken.getAuthorizationURL();
          requestToken = newRequestToken; // request token will be
                                          // needed to get access token
          Intent browserIntent = new Intent(Intent.ACTION_MAIN, Uri
              .parse(authURL));
          browserIntent.setClassName(container.$context(),
              WEBVIEW_ACTIVITY_CLASS);
          container.$context().startActivityForResult(browserIntent,
              requestCode);
        } catch (TwitterException e) {
          Log.i("Twitter", "Got exception: " + e.getMessage());
          e.printStackTrace();
          form.dispatchErrorOccurredEvent(Twitter.this, "Authorize",
              ErrorMessages.ERROR_TWITTER_EXCEPTION, e.getMessage());
          DeAuthorize(); // clean up
        } catch (IllegalStateException ise) {
          // This should never happen cause it should return at the
          // if (checkAccessToken...). We mark as an error but let continue
          Log.e("Twitter", "OAuthConsumer was already set: launch IsAuthorized()");
          handler.post(new Runnable() {
            @Override
            public void run() {
              IsAuthorized();
            }
          });
        }
      }
    });
  }

  /**
   * Check whether we already have a valid Twitter access token
   */
  @SimpleFunction(description = "Checks whether we already have access, and if so, causes "
      + "IsAuthorized event handler to be called.")
  public void CheckAuthorized() {
    final String myConsumerKey = consumerKey;
    final String myConsumerSecret = consumerSecret;
    AsynchUtil.runAsynchronously(new Runnable() {
      public void run() {
        if (checkAccessToken(myConsumerKey, myConsumerSecret)) {
          handler.post(new Runnable() {
            @Override
            public void run() {
              IsAuthorized();
            }
          });
        }
      }
    });
  }

  /*
   * Get result from starting WebView activity to authorize access.
   * Exchanges the request token + oauth_verifier for an access token,
   * saves it, and fires IsAuthorized on the UI thread.
   */
  @Override
  public void resultReturned(int requestCode, int resultCode, Intent data) {
    Log.i("Twitter", "Got result " + resultCode);
    if (data != null) {
      Uri uri = data.getData();
      if (uri != null) {
        Log.i("Twitter", "Intent URI: " + uri.toString());
        final String oauthVerifier = uri.getQueryParameter("oauth_verifier");
        if (twitter == null) {
          Log.e("Twitter", "twitter field is unexpectedly null");
          // NOTE(review): control falls through after this error -- the async
          // block below would then NPE on 'twitter'; confirm this is intended.
          form.dispatchErrorOccurredEvent(this, "Authorize",
              ErrorMessages.ERROR_TWITTER_UNABLE_TO_GET_ACCESS_TOKEN,
              "internal error: can't access Twitter library");
          new RuntimeException().printStackTrace();
        }
        if (requestToken != null && oauthVerifier != null
            && oauthVerifier.length() != 0) {
          AsynchUtil.runAsynchronously(new Runnable() {
            public void run() {
              try {
                AccessToken resultAccessToken;
                resultAccessToken = twitter.getOAuthAccessToken(requestToken,
                    oauthVerifier);
                accessToken = resultAccessToken;
                userName = accessToken.getScreenName();
                saveAccessToken(resultAccessToken);
                handler.post(new Runnable() {
                  @Override
                  public void run() {
                    IsAuthorized();
                  }
                });
              } catch (TwitterException e) {
                Log.e("Twitter", "Got exception: " + e.getMessage());
                e.printStackTrace();
                form.dispatchErrorOccurredEvent(Twitter.this, "Authorize",
                    ErrorMessages.ERROR_TWITTER_UNABLE_TO_GET_ACCESS_TOKEN,
                    e.getMessage());
                deAuthorize(); // clean up
              }
            }
          });
        } else {
          form.dispatchErrorOccurredEvent(this, "Authorize",
              ErrorMessages.ERROR_TWITTER_AUTHORIZATION_FAILED);
          deAuthorize(); // clean up
        }
      } else {
        Log.e("Twitter", "uri returned from WebView activity was unexpectedly null");
        deAuthorize(); // clean up so we can call Authorize again
      }
    } else {
      Log.e("Twitter", "intent returned from WebView activity was unexpectedly null");
      deAuthorize(); // clean up so we can call Authorize again
    }
  }

  // Persists the OAuth access token in SharedPreferences; a null argument
  // clears any saved token instead.
  private void saveAccessToken(AccessToken accessToken) {
    final SharedPreferences.Editor sharedPrefsEditor = sharedPreferences.edit();
    if (accessToken == null) {
      sharedPrefsEditor.remove(ACCESS_TOKEN_TAG);
      sharedPrefsEditor.remove(ACCESS_SECRET_TAG);
    } else {
      sharedPrefsEditor.putString(ACCESS_TOKEN_TAG, accessToken.getToken());
      sharedPrefsEditor.putString(ACCESS_SECRET_TAG, accessToken.getTokenSecret());
    }
    sharedPrefsEditor.commit();
  }

  // Reads the saved OAuth token/secret pair; returns null when either part is missing.
  private AccessToken retrieveAccessToken() {
    String token = sharedPreferences.getString(ACCESS_TOKEN_TAG, "");
    String secret = sharedPreferences.getString(ACCESS_SECRET_TAG, "");
    if (token.length() == 0 || secret.length() == 0) {
      return null;
    }
    return new AccessToken(token, secret);
  }

  /**
   * Remove authentication for this app instance
   */
  @SimpleFunction(description = "Removes Twitter authorization from this running app instance")
  public void DeAuthorize() {
    deAuthorize();
  }

  // Clears all authorization state: tokens, user name, the saved token in
  // SharedPreferences, and the twitter4j instance itself.
  private void deAuthorize() {
    final twitter4j.Twitter oldTwitter;
    requestToken = null;
    accessToken = null;
    userName = "";
    oldTwitter = twitter;
    twitter = null; // setting twitter to null gives us a quick check
                    // that we don't have an authorized version around.
    saveAccessToken(accessToken);

    // clear the access token from the old twitter instance, just in case
    // someone stashed it away.
    if (oldTwitter != null) {
      oldTwitter.setOAuthAccessToken(null);
    }
  }

  /**
   * Sends a Tweet of the currently logged in user.
   */
  @SimpleFunction(description = "This sends a tweet as the logged-in user with the "
      + "specified Text, which will be trimmed if it exceeds "
      + MAX_CHARACTERS + " characters. "
      + "<p><u>Requirements</u>: This should only be called after the "
      + "<code>IsAuthorized</code> event has been raised, indicating that the "
      + "user has successfully logged in to Twitter.</p>")
  public void Tweet(final String status) {
    if (twitter == null || userName.length() == 0) {
      form.dispatchErrorOccurredEvent(this, "Tweet",
          ErrorMessages.ERROR_TWITTER_SET_STATUS_FAILED, "Need to login?");
      return;
    }
    // TODO(sharon): note that if the user calls DeAuthorize immediately
    // after
    // Tweet it is possible that the DeAuthorize call can slip in
    // and invalidate the authorization credentials for myTwitter, causing
    // the call below to fail. If we want to prevent this we could consider
    // using an ExecutorService object to serialize calls to Twitter.
    AsynchUtil.runAsynchronously(new Runnable() {
      public void run() {
        try {
          twitter.updateStatus(status);
        } catch (TwitterException e) {
          form.dispatchErrorOccurredEvent(Twitter.this, "Tweet",
              ErrorMessages.ERROR_TWITTER_SET_STATUS_FAILED, e.getMessage());
        }
      }
    });
  }

  /**
   * Get the image URL back after TweetwithImage uploads it to TwitPic
   */
  @SimpleEvent(description = "This event is raised when the a twitter message with a picture "
      + "has been uploaded via <code>TweetWithImage</code>. "
      + "the uploaded image URL is in the <code>url</code> variable. ")
  public void ImageUploaded(final String url) {
    EventDispatcher.dispatchEvent(this, "ImageUploaded", url);
    return;
  }

  /**
   * Tweet with Image, Uploaded to Twitter
   */
  @SimpleFunction(description = "This sends a tweet as the logged-in user with the "
      + "specified Text and a path to the image to be uploaded, which will be trimmed if it "
      + "exceeds " + MAX_CHARACTERS + " characters. "
      + "If an image is not found or invalid, only the text will be tweeted."
      + "<p><u>Requirements</u>: This should only be called after the "
      + "<code>IsAuthorized</code> event has been raised, indicating that the "
      + "user has successfully logged in to Twitter.</p>"
      )
  public void TweetWithImage(final String status, final String imagePath) {
    if (twitter == null || userName.length() == 0) {
      form.dispatchErrorOccurredEvent(this, "TweetWithImage",
          ErrorMessages.ERROR_TWITTER_SET_STATUS_FAILED, "Need to login?");
      return;
    }
    AsynchUtil.runAsynchronously(new Runnable() {
      String imageUrl;
      public void run() {
        try {
          String cleanImagePath = imagePath;
          // Clean up the file path if necessary
          if (cleanImagePath.startsWith("file://")) {
            cleanImagePath = imagePath.replace("file://", "");
            Log.d(TAG, "The clean image path is "+ cleanImagePath);
          }
          File imageFilePath = new File(cleanImagePath);
          if (imageFilePath.exists()) {
            Log.d(TAG, "The clean image does exist");
            StatusUpdate theTweet = new StatusUpdate(status);
            theTweet.setMedia(imageFilePath);
            Status st = twitter.updateStatus(theTweet);
            // report the HTTPS URL of the first attached media entity
            MediaEntity [] entities = st.getMediaEntities();
            imageUrl = entities[0].getMediaURLHttps();
            handler.post(new Runnable() {
              @Override
              public void run() {
                ImageUploaded(imageUrl);
              }
            });
          } else {
            form.dispatchErrorOccurredEvent(Twitter.this, "TweetWithImage",
                ErrorMessages.ERROR_TWITTER_INVALID_IMAGE_PATH);
          }
        } catch (TwitterException e) {
          form.dispatchErrorOccurredEvent(Twitter.this, "TweetWithImage",
              ErrorMessages.ERROR_TWITTER_SET_STATUS_FAILED, e.getMessage());
        }
      }
    });
  }

  /**
   * Gets the most recent messages where your username is mentioned.
   */
  @SimpleFunction(description = "Requests the " + MAX_MENTIONS_RETURNED + " most "
      + "recent mentions of the logged-in user. When the mentions have been "
      + "retrieved, the system will raise the <code>MentionsReceived</code> "
      + "event and set the <code>Mentions</code> property to the list of "
      + "mentions."
      + "<p><u>Requirements</u>: This should only be called after the "
      + "<code>IsAuthorized</code> event has been raised, indicating that the "
      + "user has successfully logged in to Twitter.</p>")
  public void RequestMentions() {
    if (twitter == null || userName.length() == 0) {
      form.dispatchErrorOccurredEvent(this, "RequestMentions",
          ErrorMessages.ERROR_TWITTER_REQUEST_MENTIONS_FAILED, "Need to login?");
      return;
    }
    AsynchUtil.runAsynchronously(new Runnable() {
      List<Status> replies = Collections.emptyList();
      public void run() {
        try {
          replies = twitter.getMentionsTimeline();
        } catch (TwitterException e) {
          form.dispatchErrorOccurredEvent(Twitter.this, "RequestMentions",
              ErrorMessages.ERROR_TWITTER_REQUEST_MENTIONS_FAILED,
              e.getMessage());
        } finally {
          // the event is raised on the UI thread even when the fetch failed
          handler.post(new Runnable() {
            public void run() {
              mentions.clear();
              for (Status status : replies) {
                mentions.add(status.getUser().getScreenName() + " "
                    + status.getText());
              }
              MentionsReceived(mentions);
            }
          });
        }
      }
    });
  }

  /**
   * Indicates when all the mentions requested through
   * {@link #RequestMentions()} have been received.
   */
  @SimpleEvent(description = "This event is raised when the mentions of the logged-in user "
      + "requested through <code>RequestMentions</code> have been retrieved. "
      + "A list of the mentions can then be found in the <code>mentions</code> "
      + "parameter or the <code>Mentions</code> property.")
  public void MentionsReceived(final List<String> mentions) {
    EventDispatcher.dispatchEvent(this, "MentionsReceived", mentions);
  }

  @SimpleProperty(category = PropertyCategory.BEHAVIOR, description = "This property contains a list of mentions of the "
      + "logged-in user. Initially, the list is empty. To set it, the "
      + "program must: <ol> "
      + "<li> Call the <code>Authorize</code> method.</li> "
      + "<li> Wait for the <code>IsAuthorized</code> event.</li> "
      + "<li> Call the <code>RequestMentions</code> method.</li> "
      + "<li> Wait for the <code>MentionsReceived</code> event.</li></ol>\n"
      + "The value of this property will then be set to the list of mentions "
      + "(and will maintain its value until any subsequent calls to "
      + "<code>RequestMentions</code>).")
  public List<String> Mentions() {
    return mentions;
  }

  /**
   * Gets who is following you.
   */
  @SimpleFunction
  public void RequestFollowers() {
    if (twitter == null || userName.length() == 0) {
      form.dispatchErrorOccurredEvent(this, "RequestFollowers",
          ErrorMessages.ERROR_TWITTER_REQUEST_FOLLOWERS_FAILED,
          "Need to login?");
      return;
    }
    AsynchUtil.runAsynchronously(new Runnable() {
      List<User> friends = new ArrayList<User>();
      public void run() {
        try {
          IDs followerIDs = twitter.getFollowersIDs(-1);
          for (long id : followerIDs.getIDs()) {
            // convert from the IDs returned to the User
            friends.add(twitter.showUser(id));
          }
        } catch (TwitterException e) {
          form.dispatchErrorOccurredEvent(Twitter.this, "RequestFollowers",
              ErrorMessages.ERROR_TWITTER_REQUEST_FOLLOWERS_FAILED,
              e.getMessage());
        } finally {
          handler.post(new Runnable() {
            public void run() {
              followers.clear();
              for (User user : friends) {
                followers.add(user.getName());
              }
              FollowersReceived(followers);
            }
          });
        }
      }
    });
  }

  /**
   * Indicates when all of your followers requested through
   * {@link #RequestFollowers()} have been received.
   */
  @SimpleEvent(description = "This event is raised when all of the followers of the "
      + "logged-in user requested through <code>RequestFollowers</code> have "
      + "been retrieved. A list of the followers can then be found in the "
      + "<code>followers</code> parameter or the <code>Followers</code> "
      + "property.")
  public void FollowersReceived(final List<String> followers2) {
    EventDispatcher.dispatchEvent(this, "FollowersReceived", followers2);
  }

  @SimpleProperty(category = PropertyCategory.BEHAVIOR, description = "This property contains a list of the followers of the "
      + "logged-in user. Initially, the list is empty. To set it, the "
      + "program must: <ol> "
      + "<li> Call the <code>Authorize</code> method.</li> "
      + "<li> Wait for the <code>IsAuthorized</code> event.</li> "
      + "<li> Call the <code>RequestFollowers</code> method.</li> "
      + "<li> Wait for the <code>FollowersReceived</code> event.</li></ol>\n"
      + "The value of this property will then be set to the list of "
      + "followers (and maintain its value until any subsequent call to "
      + "<code>RequestFollowers</code>).")
  public List<String> Followers() {
    return followers;
  }

  /**
   * Gets the most recent messages sent directly to you.
   */
  @SimpleFunction(description = "Requests the " + MAX_MENTIONS_RETURNED + " most "
      + "recent direct messages sent to the logged-in user. When the "
      + "messages have been retrieved, the system will raise the "
      + "<code>DirectMessagesReceived</code> event and set the "
      + "<code>DirectMessages</code> property to the list of messages."
      + "<p><u>Requirements</u>: This should only be called after the "
      + "<code>IsAuthorized</code> event has been raised, indicating that the "
      + "user has successfully logged in to Twitter.</p>")
  public void RequestDirectMessages() {
    if (twitter == null || userName.length() == 0) {
      form.dispatchErrorOccurredEvent(this, "RequestDirectMessages",
          ErrorMessages.ERROR_TWITTER_REQUEST_DIRECT_MESSAGES_FAILED,
          "Need to login?");
      return;
    }
    AsynchUtil.runAsynchronously(new Runnable() {
      List<DirectMessage> messages = Collections.emptyList();

      @Override
      public void run() {
        try {
          messages = twitter.getDirectMessages();
        } catch (TwitterException e) {
          form.dispatchErrorOccurredEvent(Twitter.this,
              "RequestDirectMessages",
              ErrorMessages.ERROR_TWITTER_REQUEST_DIRECT_MESSAGES_FAILED,
              e.getMessage());
        } finally {
          handler.post(new Runnable() {
            @Override
            public void run() {
              directMessages.clear();
              for (DirectMessage message : messages) {
                directMessages.add(message.getSenderScreenName() + " "
                    + message.getText());
              }
              DirectMessagesReceived(directMessages);
            }
          });
        }
      }
    });
  }

  /**
   * Indicates when all the direct messages requested through
   * {@link #RequestDirectMessages()} have been received.
   */
  @SimpleEvent(description = "This event is raised when the recent messages "
      + "requested through <code>RequestDirectMessages</code> have "
      + "been retrieved. A list of the messages can then be found in the "
      + "<code>messages</code> parameter or the <code>Messages</code> "
      + "property.")
  public void DirectMessagesReceived(final List<String> messages) {
    EventDispatcher.dispatchEvent(this, "DirectMessagesReceived", messages);
  }

  @SimpleProperty(category = PropertyCategory.BEHAVIOR, description = "This property contains a list of the most recent "
      + "messages mentioning the logged-in user. Initially, the list is "
      + "empty. To set it, the program must: <ol> "
      + "<li> Call the <code>Authorize</code> method.</li> "
      + "<li> Wait for the <code>Authorized</code> event.</li> "
      + "<li> Call the <code>RequestDirectMessages</code> method.</li> "
      + "<li> Wait for the <code>DirectMessagesReceived</code> event.</li>"
      + "</ol>\n"
      + "The value of this property will then be set to the list of direct "
      + "messages retrieved (and maintain that value until any subsequent "
      + "call to <code>RequestDirectMessages</code>).")
  public List<String> DirectMessages() {
    return directMessages;
  }

  /**
   * Sends a direct message to a specified username.
   */
  @SimpleFunction(description = "This sends a direct (private) message to the specified "
      + "user. The message will be trimmed if it exceeds " + MAX_CHARACTERS
      + "characters. "
      + "<p><u>Requirements</u>: This should only be called after the "
      + "<code>IsAuthorized</code> event has been raised, indicating that the "
      + "user has successfully logged in to Twitter.</p>")
  public void DirectMessage(final String user, final String message) {
    if (twitter == null || userName.length() == 0) {
      form.dispatchErrorOccurredEvent(this, "DirectMessage",
          ErrorMessages.ERROR_TWITTER_DIRECT_MESSAGE_FAILED, "Need to login?");
      return;
    }
    AsynchUtil.runAsynchronously(new Runnable() {
      public void run() {
        try {
          twitter.sendDirectMessage(user, message);
        } catch (TwitterException e) {
          form.dispatchErrorOccurredEvent(Twitter.this, "DirectMessage",
              ErrorMessages.ERROR_TWITTER_DIRECT_MESSAGE_FAILED,
              e.getMessage());
        }
      }
    });
  }

  /**
   * Starts following a user.
   */
  @SimpleFunction
  public void Follow(final String user) {
    if (twitter == null || userName.length() == 0) {
      form.dispatchErrorOccurredEvent(this, "Follow",
          ErrorMessages.ERROR_TWITTER_FOLLOW_FAILED, "Need to login?");
      return;
    }
    AsynchUtil.runAsynchronously(new Runnable() {
      public void run() {
        try {
          twitter.createFriendship(user);
        } catch (TwitterException e) {
          form.dispatchErrorOccurredEvent(Twitter.this, "Follow",
              ErrorMessages.ERROR_TWITTER_FOLLOW_FAILED, e.getMessage());
        }
      }
    });
  }

  /**
   * Stops following a user.
   */
  @SimpleFunction
  public void StopFollowing(final String user) {
    if (twitter == null || userName.length() == 0) {
      form.dispatchErrorOccurredEvent(this, "StopFollowing",
          ErrorMessages.ERROR_TWITTER_STOP_FOLLOWING_FAILED, "Need to login?");
      return;
    }
    AsynchUtil.runAsynchronously(new Runnable() {
      public void run() {
        try {
          twitter.destroyFriendship(user);
        } catch (TwitterException e) {
          form.dispatchErrorOccurredEvent(Twitter.this, "StopFollowing",
              ErrorMessages.ERROR_TWITTER_STOP_FOLLOWING_FAILED,
              e.getMessage());
        }
      }
    });
  }

  /**
   * Gets the most recent 20 messages in the user's timeline.
   */
  @SimpleFunction
  public void RequestFriendTimeline() {
    if (twitter == null || userName.length() == 0) {
      form.dispatchErrorOccurredEvent(this, "RequestFriendTimeline",
          ErrorMessages.ERROR_TWITTER_REQUEST_FRIEND_TIMELINE_FAILED,
          "Need to login?");
      return;
    }
    AsynchUtil.runAsynchronously(new Runnable() {
      List<Status> messages = Collections.emptyList();
      public void run() {
        try {
          messages = twitter.getHomeTimeline();
        } catch (TwitterException e) {
          form.dispatchErrorOccurredEvent(Twitter.this, "RequestFriendTimeline",
              ErrorMessages.ERROR_TWITTER_REQUEST_FRIEND_TIMELINE_FAILED,
              e.getMessage());
        } finally {
          handler.post(new Runnable() {
            public void run() {
              timeline.clear();
              // each timeline entry is a two-element list: (screen name, text)
              for (Status message : messages) {
                List<String> status = new ArrayList<String>();
                status.add(message.getUser().getScreenName());
                status.add(message.getText());
                timeline.add(status);
              }
              FriendTimelineReceived(timeline);
            }
          });
        }
      }
    });
  }

  /**
   * Indicates when the friend timeline requested through
   * {@link #RequestFriendTimeline()} has been received.
   */
  @SimpleEvent(description = "This event is raised when the messages "
      + "requested through <code>RequestFriendTimeline</code> have "
      + "been retrieved. The <code>timeline</code> parameter and the "
      + "<code>Timeline</code> property will contain a list of lists, where "
      + "each sub-list contains a status update of the form (username message)")
  public void FriendTimelineReceived(final List<List<String>> timeline) {
    EventDispatcher.dispatchEvent(this, "FriendTimelineReceived", timeline);
  }

  @SimpleProperty(category = PropertyCategory.BEHAVIOR, description = "This property contains the 20 most recent messages of "
      + "users being followed. Initially, the list is empty. To set it, "
      + "the program must: <ol> "
      + "<li> Call the <code>Authorize</code> method.</li> "
      + "<li> Wait for the <code>IsAuthorized</code> event.</li> "
      + "<li> Specify users to follow with one or more calls to the "
      + "<code>Follow</code> method.</li> "
      + "<li> Call the <code>RequestFriendTimeline</code> method.</li> "
      + "<li> Wait for the <code>FriendTimelineReceived</code> event.</li> "
      + "</ol>\n"
      + "The value of this property will then be set to the list of messages "
      + "(and maintain its value until any subsequent call to "
      + "<code>RequestFriendTimeline</code>.")
  public List<List<String>> FriendTimeline() {
    return timeline;
  }

  /**
   * Search for tweets or labels
   */
  @SimpleFunction(description = "This searches Twitter for the given String query."
      + "<p><u>Requirements</u>: This should only be called after the "
      + "<code>IsAuthorized</code> event has been raised, indicating that the "
      + "user has successfully logged in to Twitter.</p>")
  public void SearchTwitter(final String query) {
    if (twitter == null || userName.length() == 0) {
      form.dispatchErrorOccurredEvent(this, "SearchTwitter",
          ErrorMessages.ERROR_TWITTER_SEARCH_FAILED, "Need to login?");
      return;
    }
    AsynchUtil.runAsynchronously(new Runnable() {
      List<Status> tweets = Collections.emptyList();
      public void run() {
        try {
          tweets = twitter.search(new Query(query)).getTweets();
        } catch (TwitterException e) {
          form.dispatchErrorOccurredEvent(Twitter.this, "SearchTwitter",
              ErrorMessages.ERROR_TWITTER_SEARCH_FAILED, e.getMessage());
        } finally {
          handler.post(new Runnable() {
            public void run() {
              searchResults.clear();
              for (Status tweet : tweets) {
                searchResults.add(tweet.getUser().getName() + " " + tweet.getText());
              }
              SearchSuccessful(searchResults);
            }
          });
        }
      }
    });
  }

  /**
   * Indicates when the search requested through {@link #SearchTwitter(String)}
   * has completed.
   */
  @SimpleEvent(description = "This event is raised when the results of the search "
      + "requested through <code>SearchSuccessful</code> have "
      + "been retrieved. A list of the results can then be found in the "
      + "<code>results</code> parameter or the <code>Results</code> "
      + "property.")
  public void SearchSuccessful(final List<String> searchResults) {
    EventDispatcher.dispatchEvent(this, "SearchSuccessful", searchResults);
  }

  @SimpleProperty(category = PropertyCategory.BEHAVIOR, description = "This property, which is initially empty, is set to a "
      + "list of search results after the program: <ol>"
      + "<li>Calls the <code>SearchTwitter</code> method.</li> "
      + "<li>Waits for the <code>SearchSuccessful</code> event.</li></ol>\n"
      + "The value of the property will then be the same as the parameter to "
      + "<code>SearchSuccessful</code>. Note that it is not necessary to "
      + "call the <code>Authorize</code> method before calling "
      + "<code>SearchTwitter</code>.")
  public List<String> SearchResults() {
    return searchResults;
  }

  /**
   * Check whether accessToken is stored in preferences. If there is one, set it.
   * If it was already set (for instance calling Authorize twice in a row),
   * it will throw an IllegalStateException that, in this case, can be ignored.
   * @return true if accessToken is valid and set (user authorized), false otherwise.
   */
  private boolean checkAccessToken(String myConsumerKey,
      String myConsumerSecret) {
    accessToken = retrieveAccessToken();
    if (accessToken == null) {
      return false;
    } else {
      if (twitter == null) {
        twitter = new TwitterFactory().getInstance();
      }
      try {
        twitter.setOAuthConsumer(consumerKey, consumerSecret);
        twitter.setOAuthAccessToken(accessToken);
      } catch (IllegalStateException ies) {
        // ignore: it means that the consumer data was already set
      }
      if (userName.trim().length() == 0) {
        User user;
        try {
          user = twitter.verifyCredentials();
          userName = user.getScreenName();
        } catch (TwitterException e) {
          // something went wrong (networks or bad credentials <-- DeAuthorize
          deAuthorize();
          return false;
        }
      }
      return true;
    }
  }
}
package com.mpatric.mp3agic;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Tests for reading, writing and converting ID3v2 tags (versions 2.0, 2.3 and 2.4).
 * MP3 fixture files live under src/test/resources; the *ForTesting inner classes
 * stub out frame unpacking so header parsing can be tested in isolation.
 */
public class ID3v2TagTest {

    // ASCII bytes spelling the "ID3" magic marker at the start of an ID3v2 header.
    private static final byte BYTE_I = 0x49;
    private static final byte BYTE_D = 0x44;
    private static final byte BYTE_3 = 0x33;

    // Minimal valid 10-byte ID3v2 header: "ID3", major version 4, revision 0,
    // no flags, synchsafe size bytes 0 0 2 1 (257 bytes of tag data).
    private static final byte[] ID3V2_HEADER = {BYTE_I, BYTE_D, BYTE_3, 4, 0, 0, 0, 0, 2, 1};

    // Version is derived from header byte 3 (major) and byte 4 (revision).
    @Test
    public void shouldInitialiseFromHeaderBlockWithValidHeaders() throws NoSuchTagException, UnsupportedTagException, InvalidDataException {
        byte[] header = BufferTools.copyBuffer(ID3V2_HEADER, 0, ID3V2_HEADER.length);
        header[3] = 2;
        header[4] = 0;
        ID3v2 id3v2tag;
        id3v2tag = createTag(header);
        assertEquals("2.0", id3v2tag.getVersion());
        header[3] = 3;
        id3v2tag = createTag(header);
        assertEquals("3.0", id3v2tag.getVersion());
        header[3] = 4;
        id3v2tag = createTag(header);
        assertEquals("4.0", id3v2tag.getVersion());
    }

    // Data length is decoded from the synchsafe size bytes (header bytes 6-9).
    @Test
    public void shouldCalculateCorrectDataLengthsFromHeaderBlock() throws NoSuchTagException, UnsupportedTagException, InvalidDataException {
        byte[] header = BufferTools.copyBuffer(ID3V2_HEADER, 0, ID3V2_HEADER.length);
        ID3v2 id3v2tag = createTag(header);
        assertEquals(257, id3v2tag.getDataLength());
        header[8] = 0x09;
        header[9] = 0x41;
        id3v2tag = createTag(header);
        assertEquals(1217, id3v2tag.getDataLength());
    }

    // Major version 5 is not supported by the factory.
    @Test
    public void shouldThrowExceptionForNonSupportedVersionInId3v2HeaderBlock() throws NoSuchTagException, InvalidDataException {
        byte[] header = BufferTools.copyBuffer(ID3V2_HEADER, 0, ID3V2_HEADER.length);
        header[3] = 5;
        header[4] = 0;
        try {
            ID3v2TagFactory.createTag(header);
            fail("UnsupportedTagException expected but not thrown");
        } catch (UnsupportedTagException e) {
            // expected
        }
    }

    // Frame sets should iterate in ascending key order.
    @Test
    public void shouldSortId3TagsAlphabetically() throws Exception {
        byte[] buffer = TestHelper.loadFile("src/test/resources/v1andv23tags.mp3");
        ID3v2 id3v2tag = ID3v2TagFactory.createTag(buffer);
        Map<String, ID3v2FrameSet> frameSets = id3v2tag.getFrameSets();
        Iterator<ID3v2FrameSet> frameSetIterator = frameSets.values().iterator();
        String lastKey = "";
        while (frameSetIterator.hasNext()) {
            ID3v2FrameSet frameSet = frameSetIterator.next();
            assertTrue(frameSet.getId().compareTo(lastKey) > 0);
            lastKey = frameSet.getId();
        }
    }

    // Verifies the expected frame-set inventory of the v2.3 fixture file.
    @Test
    public void shouldReadFramesFromMp3With32Tag() throws IOException, NoSuchTagException, UnsupportedTagException, InvalidDataException {
        byte[] buffer = TestHelper.loadFile("src/test/resources/v1andv23tags.mp3");
        ID3v2 id3v2tag = ID3v2TagFactory.createTag(buffer);
        assertEquals("3.0", id3v2tag.getVersion());
        assertEquals(0x44B, id3v2tag.getLength());
        assertEquals(12, id3v2tag.getFrameSets().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TENC")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("WXXX")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TCOP")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TOPE")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TCOM")).getFrames().size());
        assertEquals(2, (id3v2tag.getFrameSets().get("COMM")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TPE1")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TALB")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TRCK")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TYER")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TCON")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TIT2")).getFrames().size());
    }

    // v2.4 tags may carry a footer; the fixture should still parse.
    @Test
    public void shouldReadId3v2WithFooter() throws IOException, NoSuchTagException, UnsupportedTagException, InvalidDataException {
        byte[] buffer = TestHelper.loadFile("src/test/resources/v1andv24tags.mp3");
        ID3v2 id3v2tag = ID3v2TagFactory.createTag(buffer);
        assertEquals("4.0", id3v2tag.getVersion());
        assertEquals(0x44B, id3v2tag.getLength());
    }

    // Field-by-field verification of the v2.4 fixture, including URL frames and album art.
    @Test
    public void shouldReadTagFieldsFromMp3With24tag() throws Exception {
        byte[] buffer = TestHelper.loadFile("src/test/resources/v24tagswithalbumimage.mp3");
        ID3v2 id3v24tag = ID3v2TagFactory.createTag(buffer);
        assertEquals("4.0", id3v24tag.getVersion());
        assertEquals("1", id3v24tag.getTrack());
        assertEquals("ARTIST123456789012345678901234", id3v24tag.getArtist());
        assertEquals("TITLE1234567890123456789012345", id3v24tag.getTitle());
        assertEquals("ALBUM1234567890123456789012345", id3v24tag.getAlbum());
        assertEquals(0x0d, id3v24tag.getGenre());
        assertEquals("Pop", id3v24tag.getGenreDescription());
        assertEquals("COMMENT123456789012345678901", id3v24tag.getComment());
        assertEquals("COMPOSER23456789012345678901234", id3v24tag.getComposer());
        assertEquals("ORIGARTIST234567890123456789012", id3v24tag.getOriginalArtist());
        assertEquals("COPYRIGHT2345678901234567890123", id3v24tag.getCopyright());
        assertEquals("URL2345678901234567890123456789", id3v24tag.getUrl());
        assertEquals("COMMERCIALURL234567890123456789", id3v24tag.getCommercialUrl());
        assertEquals("COPYRIGHTURL2345678901234567890", id3v24tag.getCopyrightUrl());
        assertEquals("OFFICIALARTISTURL23456789012345", id3v24tag.getArtistUrl());
        assertEquals("OFFICIALAUDIOFILE23456789012345", id3v24tag.getAudiofileUrl());
        assertEquals("OFFICIALAUDIOSOURCE234567890123", id3v24tag.getAudioSourceUrl());
        assertEquals("INTERNETRADIOSTATIONURL23456783", id3v24tag.getRadiostationUrl());
        assertEquals("PAYMENTURL234567890123456789012", id3v24tag.getPaymentUrl());
        assertEquals("PUBLISHERURL2345678901234567890", id3v24tag.getPublisherUrl());
        assertEquals("ENCODER234567890123456789012345", id3v24tag.getEncoder());
        assertEquals(1885, id3v24tag.getAlbumImage().length);
        assertEquals("image/png", id3v24tag.getAlbumImageMimeType());
    }

    // Field-by-field verification of the v2.3 fixture with embedded album art.
    @Test
    public void shouldReadTagFieldsFromMp3With32tag() throws Exception {
        byte[] buffer = TestHelper.loadFile("src/test/resources/v1andv23tagswithalbumimage.mp3");
        ID3v2 id3tag = ID3v2TagFactory.createTag(buffer);
        assertEquals("1", id3tag.getTrack());
        assertEquals("ARTIST123456789012345678901234", id3tag.getArtist());
        assertEquals("TITLE1234567890123456789012345", id3tag.getTitle());
        assertEquals("ALBUM1234567890123456789012345", id3tag.getAlbum());
        assertEquals("2001", id3tag.getYear());
        assertEquals(0x0d, id3tag.getGenre());
        assertEquals("Pop", id3tag.getGenreDescription());
        assertEquals("COMMENT123456789012345678901", id3tag.getComment());
        assertEquals("COMPOSER23456789012345678901234", id3tag.getComposer());
        assertEquals("ORIGARTIST234567890123456789012", id3tag.getOriginalArtist());
        assertEquals("COPYRIGHT2345678901234567890123", id3tag.getCopyright());
        assertEquals("URL2345678901234567890123456789", id3tag.getUrl());
        assertEquals("ENCODER234567890123456789012345", id3tag.getEncoder());
        assertEquals(1885, id3tag.getAlbumImage().length);
        assertEquals("image/png", id3tag.getAlbumImageMimeType());
    }

    // Round-trip: serialize a populated v2.3 tag and parse it back.
    @Test
    public void shouldConvert23TagToBytesAndBackToEquivalentTag() throws Exception {
        ID3v2 id3tag = new ID3v23Tag();
        setTagFields(id3tag);
        byte[] data = id3tag.toBytes();
        ID3v2 id3tagCopy = new ID3v23Tag(data);
        assertEquals(2340, data.length);
        assertEquals(id3tag, id3tagCopy);
    }

    // Round-trip with a footer; the 10-byte footer grows the output to 2350.
    @Test
    public void shouldConvert24TagWithFooterToBytesAndBackToEquivalentTag() throws Exception {
        ID3v2 id3tag = new ID3v24Tag();
        setTagFields(id3tag);
        id3tag.setFooter(true);
        byte[] data = id3tag.toBytes();
        ID3v2 id3tagCopy = new ID3v24Tag(data);
        assertEquals(2350, data.length);
        assertEquals(id3tag, id3tagCopy);
    }

    // Round-trip with padding enabled; output grows by the fixed padding length.
    @Test
    public void shouldConvert24TagWithPaddingToBytesAndBackToEquivalentTag() throws Exception {
        ID3v2 id3tag = new ID3v24Tag();
        setTagFields(id3tag);
        id3tag.setPadding(true);
        byte[] data = id3tag.toBytes();
        ID3v2 id3tagCopy = new ID3v24Tag(data);
        assertEquals(2340 + AbstractID3v2Tag.PADDING_LENGTH, data.length);
        assertEquals(id3tag, id3tagCopy);
    }

    // Footer and padding are mutually exclusive: footer wins, no padding is emitted.
    @Test
    public void shouldNotUsePaddingOnA24TagIfItHasAFooter() throws Exception {
        ID3v2 id3tag = new ID3v24Tag();
        setTagFields(id3tag);
        id3tag.setFooter(true);
        id3tag.setPadding(true);
        byte[] data = id3tag.toBytes();
        assertEquals(2350, data.length);
    }

    // Combined genre strings like "(13)Pop" should yield the numeric part.
    @Test
    public void shouldExtractGenreNumberFromCombinedGenreStringsCorrectly() throws Exception {
        ID3v23TagForTesting id3tag = new ID3v23TagForTesting();
        try {
            id3tag.extractGenreNumber("");
            fail("NumberFormatException expected but not thrown");
        } catch (NumberFormatException e) {
            // expected
        }
        assertEquals(13, id3tag.extractGenreNumber("13"));
        assertEquals(13, id3tag.extractGenreNumber("(13)"));
        assertEquals(13, id3tag.extractGenreNumber("(13)Pop"));
    }

    // Combined genre strings like "(13)Pop" should yield the textual part.
    @Test
    public void shouldExtractGenreDescriptionFromCombinedGenreStringsCorrectly() throws Exception {
        ID3v23TagForTesting id3tag = new ID3v23TagForTesting();
        assertNull(id3tag.extractGenreDescription(""));
        assertEquals("", id3tag.extractGenreDescription("(13)"));
        assertEquals("Pop", id3tag.extractGenreDescription("(13)Pop"));
    }

    // Setting genre 13 on a v2.3 tag should write the combined "(13)Pop" TCON frame.
    @Test
    public void shouldSetCombinedGenreOnTag() throws Exception {
        ID3v2 id3tag = new ID3v23Tag();
        setTagFields(id3tag);
        Map<String, ID3v2FrameSet> frameSets = id3tag.getFrameSets();
        ID3v2FrameSet frameSet = frameSets.get("TCON");
        List<ID3v2Frame> frames = frameSet.getFrames();
        ID3v2Frame frame = frames.get(0);
        byte[] bytes = frame.getData();
        // Skip the leading text-encoding byte of the frame payload.
        String genre = BufferTools.byteBufferToString(bytes, 1, bytes.length - 1);
        assertEquals("(13)Pop", genre);
    }

    // A known genre description on v2.3 maps to its number and combined TCON form.
    @Test
    public void testSetGenreDescriptionOn23Tag() throws Exception {
        ID3v2 id3tag = new ID3v23Tag();
        setTagFields(id3tag);
        id3tag.setGenreDescription("Jazz");
        assertEquals("Jazz", id3tag.getGenreDescription());
        assertEquals(8, id3tag.getGenre());
        Map<String, ID3v2FrameSet> frameSets = id3tag.getFrameSets();
        ID3v2FrameSet frameSet = frameSets.get("TCON");
        List<ID3v2Frame> frames = frameSet.getFrames();
        ID3v2Frame frame = frames.get(0);
        byte[] bytes = frame.getData();
        String genre = BufferTools.byteBufferToString(bytes, 1, bytes.length - 1);
        assertEquals("(8)Jazz", genre);
    }

    // v2.3 only supports the fixed genre list, so an unknown description is rejected.
    @Test
    public void testSetGenreDescriptionOn23TagWithUnknownGenre() throws Exception {
        ID3v2 id3tag = new ID3v23Tag();
        setTagFields(id3tag);
        try {
            id3tag.setGenreDescription("Bebop");
            fail("expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            // fine
        }
    }

    // v2.4 writes the plain description (no "(n)" prefix) into TCON.
    @Test
    public void testSetGenreDescriptionOn24Tag() throws Exception {
        ID3v2 id3tag = new ID3v24Tag();
        setTagFields(id3tag);
        id3tag.setGenreDescription("Jazz");
        assertEquals("Jazz", id3tag.getGenreDescription());
        assertEquals(8, id3tag.getGenre());
        Map<String, ID3v2FrameSet> frameSets = id3tag.getFrameSets();
        ID3v2FrameSet frameSet = frameSets.get("TCON");
        List<ID3v2Frame> frames = frameSet.getFrames();
        ID3v2Frame frame = frames.get(0);
        byte[] bytes = frame.getData();
        String genre = BufferTools.byteBufferToString(bytes, 1, bytes.length - 1);
        assertEquals("Jazz", genre);
    }

    // v2.4 accepts free-form genres; the numeric genre is then -1 (unknown).
    @Test
    public void testSetGenreDescriptionOn24TagWithUnknownGenre() throws Exception {
        ID3v2 id3tag = new ID3v24Tag();
        setTagFields(id3tag);
        id3tag.setGenreDescription("Bebop");
        assertEquals("Bebop", id3tag.getGenreDescription());
        assertEquals(-1, id3tag.getGenre());
        Map<String, ID3v2FrameSet> frameSets = id3tag.getFrameSets();
        ID3v2FrameSet frameSet = frameSets.get("TCON");
        List<ID3v2Frame> frames = frameSet.getFrames();
        ID3v2Frame frame = frames.get(0);
        byte[] bytes = frame.getData();
        String genre = BufferTools.byteBufferToString(bytes, 1, bytes.length - 1);
        assertEquals("Bebop", genre);
    }

    // Reading back a serialized combined genre restores both number and description.
    @Test
    public void shouldReadCombinedGenreInTag() throws Exception {
        ID3v2 id3tag = new ID3v23Tag();
        setTagFields(id3tag);
        byte[] bytes = id3tag.toBytes();
        ID3v2 id3tagFromData = new ID3v23Tag(bytes);
        assertEquals(13, id3tagFromData.getGenre());
        assertEquals("Pop", id3tagFromData.getGenreDescription());
    }

    // The regular comment and the iTunes normalization comment are distinct fields.
    @Test
    public void shouldGetCommentAndItunesComment() throws Exception {
        byte[] buffer = TestHelper.loadFile("src/test/resources/withitunescomment.mp3");
        ID3v2 id3tag = ID3v2TagFactory.createTag(buffer);
        assertEquals("COMMENT123456789012345678901", id3tag.getComment());
        assertEquals(" 00000A78 00000A74 00000C7C 00000C6C 00000000 00000000 000051F7 00005634 00000000 00000000", id3tag.getItunesComment());
    }

    // Obsolete v2.0 tags use three-character frame ids (TCM, COM, ...).
    @Test
    public void shouldReadFramesFromMp3WithObselete32Tag() throws Exception {
        byte[] buffer = TestHelper.loadFile("src/test/resources/obsolete.mp3");
        ID3v2 id3v2tag = ID3v2TagFactory.createTag(buffer);
        assertEquals("2.0", id3v2tag.getVersion());
        assertEquals(0x3c5a2, id3v2tag.getLength());
        assertEquals(10, id3v2tag.getFrameSets().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TCM")).getFrames().size());
        assertEquals(2, (id3v2tag.getFrameSets().get("COM")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TP1")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TAL")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TRK")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TPA")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TYE")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("PIC")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TCO")).getFrames().size());
        assertEquals(1, (id3v2tag.getFrameSets().get("TT2")).getFrames().size());
    }

    // Field access should work transparently for the obsolete v2.0 layout too.
    @Test
    public void shouldReadTagFieldsFromMp3WithObselete32tag() throws Exception {
        byte[] buffer = TestHelper.loadFile("src/test/resources/obsolete.mp3");
        ID3v2 id3tag = ID3v2TagFactory.createTag(buffer);
        assertEquals("2009", id3tag.getYear());
        assertEquals("4/15", id3tag.getTrack());
        assertEquals("image/png", id3tag.getAlbumImageMimeType());
        assertEquals(40, id3tag.getGenre());
        assertEquals("Alt Rock", id3tag.getGenreDescription());
        assertEquals("NAME1234567890123456789012345678901234567890", id3tag.getTitle());
        assertEquals("ARTIST1234567890123456789012345678901234567890", id3tag.getArtist());
        assertEquals("COMPOSER1234567890123456789012345678901234567890", id3tag.getComposer());
        assertEquals("ALBUM1234567890123456789012345678901234567890", id3tag.getAlbum());
        assertEquals("COMMENTS1234567890123456789012345678901234567890", id3tag.getComment());
    }

    // Unicode text frames (Greek, Chinese, Japanese, Gujarati) must decode correctly.
    @Test
    public void shouldReadTagFieldsWithUnicodeDataFromMp3() throws Exception {
        byte[] buffer = TestHelper.loadFile("src/test/resources/v23unicodetags.mp3");
        ID3v2 id3tag = ID3v2TagFactory.createTag(buffer);
        assertEquals("\u03B3\u03B5\u03B9\u03AC \u03C3\u03BF\u03C5", id3tag.getArtist()); // greek
        assertEquals("\u4E2D\u6587", id3tag.getTitle()); // chinese
        assertEquals("\u3053\u3093\u306B\u3061\u306F", id3tag.getAlbum()); // japanese
        assertEquals("\u0AB9\u0AC7\u0AB2\u0ACD\u0AB2\u0ACB", id3tag.getComposer()); // gujarati
    }

    // Smoke test: setting unicode values on every text field must not throw.
    @Test
    public void shouldSetTagFieldsWithUnicodeDataAndSpecifiedEncodingCorrectly() throws Exception {
        ID3v2 id3tag = new ID3v23Tag();
        id3tag.setArtist("\u03B3\u03B5\u03B9\u03AC \u03C3\u03BF\u03C5");
        id3tag.setTitle("\u4E2D\u6587");
        id3tag.setAlbum("\u3053\u3093\u306B\u3061\u306F");
        id3tag.setComment("\u03C3\u03BF\u03C5");
        id3tag.setComposer("\u0AB9\u0AC7\u0AB2\u0ACD\u0AB2\u0ACB");
        id3tag.setOriginalArtist("\u03B3\u03B5\u03B9\u03AC");
        id3tag.setCopyright("\u03B3\u03B5");
        id3tag.setUrl("URL");
        id3tag.setEncoder("\u03B9\u03AC");
        byte[] albumImage = TestHelper.loadFile("src/test/resources/image.png");
        id3tag.setAlbumImage(albumImage, "image/png");
    }

    // CTOC frame: one table of contents referencing three chapter children.
    @Test
    public void shouldExtractChapterTOCFramesFromMp3() throws Exception {
        byte[] buffer = TestHelper.loadFile("src/test/resources/v23tagwithchapters.mp3");
        ID3v2 id3tag = ID3v2TagFactory.createTag(buffer);
        ArrayList<ID3v2ChapterTOCFrameData> chapterTOCs = id3tag.getChapterTOC();
        assertEquals(1, chapterTOCs.size());
        ID3v2ChapterTOCFrameData tocFrameData = chapterTOCs.get(0);
        assertEquals("toc1", tocFrameData.getId());
        String expectedChildren[] = {"ch1", "ch2", "ch3"};
        assertArrayEquals(expectedChildren, tocFrameData.getChildren());
        ArrayList<ID3v2Frame> subFrames = tocFrameData.getSubframes();
        assertEquals(0, subFrames.size());
    }

    // CHAP frames: three 5-second chapters, each with a TIT2 title subframe.
    // Start/end offsets of -1 mean "offset not used" per the chapter spec.
    @Test
    public void shouldExtractChapterTOCAndChapterFramesFromMp3() throws Exception {
        byte[] buffer = TestHelper.loadFile("src/test/resources/v23tagwithchapters.mp3");
        ID3v2 id3tag = ID3v2TagFactory.createTag(buffer);
        ArrayList<ID3v2ChapterFrameData> chapters = id3tag.getChapters();
        assertEquals(3, chapters.size());
        ID3v2ChapterFrameData chapter1 = chapters.get(0);
        assertEquals("ch1", chapter1.getId());
        assertEquals(0, chapter1.getStartTime());
        assertEquals(5000, chapter1.getEndTime());
        assertEquals(-1, chapter1.getStartOffset());
        assertEquals(-1, chapter1.getEndOffset());
        ArrayList<ID3v2Frame> subFrames1 = chapter1.getSubframes();
        assertEquals(1, subFrames1.size());
        ID3v2Frame subFrame1 = subFrames1.get(0);
        assertEquals("TIT2", subFrame1.getId());
        ID3v2TextFrameData frameData1 = new ID3v2TextFrameData(false, subFrame1.getData());
        assertEquals("start", frameData1.getText().toString());
        ID3v2ChapterFrameData chapter2 = chapters.get(1);
        assertEquals("ch2", chapter2.getId());
        assertEquals(5000, chapter2.getStartTime());
        assertEquals(10000, chapter2.getEndTime());
        assertEquals(-1, chapter2.getStartOffset());
        assertEquals(-1, chapter2.getEndOffset());
        ArrayList<ID3v2Frame> subFrames2 = chapter2.getSubframes();
        assertEquals(1, subFrames2.size());
        ID3v2Frame subFrame2 = subFrames2.get(0);
        assertEquals("TIT2", subFrame2.getId());
        ID3v2TextFrameData frameData2 = new ID3v2TextFrameData(false, subFrame2.getData());
        assertEquals("5 seconds", frameData2.getText().toString());
        ID3v2ChapterFrameData chapter3 = chapters.get(2);
        assertEquals("ch3", chapter3.getId());
        assertEquals(10000, chapter3.getStartTime());
        assertEquals(15000, chapter3.getEndTime());
        assertEquals(-1, chapter3.getStartOffset());
        assertEquals(-1, chapter3.getEndOffset());
        ArrayList<ID3v2Frame> subFrames3 = chapter3.getSubframes();
        assertEquals(1, subFrames3.size());
        ID3v2Frame subFrame3 = subFrames3.get(0);
        assertEquals("TIT2", subFrame3.getId());
        ID3v2TextFrameData frameData3 = new ID3v2TextFrameData(false, subFrame3.getData());
        assertEquals("10 seconds", frameData3.getText().toString());
    }

    // Same v2.3 fixture re-saved by the Mp3tag tool using UTF-16LE text encoding.
    @Test
    public void shouldReadTagFieldsFromMp3With32tagResavedByMp3tagWithUTF16Encoding() throws Exception {
        byte[] buffer = TestHelper.loadFile("src/test/resources/v1andv23tagswithalbumimage-utf16le.mp3");
        ID3v2 id3tag = ID3v2TagFactory.createTag(buffer);
        assertEquals("1", id3tag.getTrack());
        assertEquals("ARTIST123456789012345678901234", id3tag.getArtist());
        assertEquals("TITLE1234567890123456789012345", id3tag.getTitle());
        assertEquals("ALBUM1234567890123456789012345", id3tag.getAlbum());
        assertEquals("2001", id3tag.getYear());
        assertEquals(0x01, id3tag.getGenre());
        assertEquals("Classic Rock", id3tag.getGenreDescription());
        assertEquals("COMMENT123456789012345678901", id3tag.getComment());
        assertEquals("COMPOSER23456789012345678901234", id3tag.getComposer());
        assertEquals("ORIGARTIST234567890123456789012", id3tag.getOriginalArtist());
        assertEquals("COPYRIGHT2345678901234567890123", id3tag.getCopyright());
        assertEquals("URL2345678901234567890123456789", id3tag.getUrl());
        assertEquals("ENCODER234567890123456789012345", id3tag.getEncoder());
        assertEquals(1885, id3tag.getAlbumImage().length);
        assertEquals("image/png", id3tag.getAlbumImageMimeType());
    }

    // clearAlbumImage() should drop the APIC frame entirely.
    @Test
    public void shouldRemoveAlbumImageFrame() throws Exception {
        byte[] buffer = TestHelper.loadFile("src/test/resources/v1andv23tagswithalbumimage.mp3");
        ID3v2 id3tag = ID3v2TagFactory.createTag(buffer);
        assertEquals(1885, id3tag.getAlbumImage().length);
        id3tag.clearAlbumImage();
        assertNull(id3tag.getAlbumImage());
    }

    // Populates every supported field; used by the round-trip and genre tests above.
    private void setTagFields(ID3v2 id3tag) throws IOException {
        id3tag.setTrack("1");
        id3tag.setArtist("ARTIST");
        id3tag.setTitle("TITLE");
        id3tag.setAlbum("ALBUM");
        id3tag.setYear("1954");
        id3tag.setGenre(0x0d);
        id3tag.setComment("COMMENT");
        id3tag.setComposer("COMPOSER");
        id3tag.setOriginalArtist("ORIGINALARTIST");
        id3tag.setCopyright("COPYRIGHT");
        id3tag.setUrl("URL");
        id3tag.setCommercialUrl("COMMERCIALURL");
        id3tag.setCopyrightUrl("COPYRIGHTURL");
        id3tag.setArtistUrl("OFFICIALARTISTURL");
        id3tag.setAudiofileUrl("OFFICIALAUDIOFILEURL");
        id3tag.setAudioSourceUrl("OFFICIALAUDIOSOURCEURL");
        id3tag.setRadiostationUrl("INTERNETRADIOSTATIONURL");
        id3tag.setPaymentUrl("PAYMENTURL");
        id3tag.setPublisherUrl("PUBLISHERURL");
        id3tag.setEncoder("ENCODER");
        byte[] albumImage = TestHelper.loadFile("src/test/resources/image.png");
        id3tag.setAlbumImage(albumImage, "image/png");
    }

    // Builds a tag via the frame-unpacking-stubbed factory below.
    private ID3v2 createTag(byte[] buffer) throws NoSuchTagException, UnsupportedTagException, InvalidDataException {
        ID3v2TagFactoryForTesting factory = new ID3v2TagFactoryForTesting();
        return factory.createTag(buffer);
    }

    // Test doubles: override unpackFrames to a no-op so only header parsing runs.
    class ID3v22TagForTesting extends ID3v22Tag {

        public ID3v22TagForTesting(byte[] buffer) throws NoSuchTagException, UnsupportedTagException, InvalidDataException {
            super(buffer);
        }

        protected int unpackFrames(byte[] buffer, int offset, int framesLength) {
            return offset;
        }
    }

    class ID3v23TagForTesting extends ID3v23Tag {

        public ID3v23TagForTesting() {
            super();
        }

        public ID3v23TagForTesting(byte[] buffer) throws NoSuchTagException, UnsupportedTagException, InvalidDataException {
            super(buffer);
        }

        protected int unpackFrames(byte[] buffer, int offset, int framesLength) {
            return offset;
        }
    }

    class ID3v24TagForTesting extends ID3v24Tag {

        public ID3v24TagForTesting(byte[] buffer) throws NoSuchTagException, UnsupportedTagException, InvalidDataException {
            super(buffer);
        }

        protected int unpackFrames(byte[] buffer, int offset, int framesLength) {
            return offset;
        }
    }

    // Mirrors ID3v2TagFactory but dispatches to the stubbed tag classes above.
    class ID3v2TagFactoryForTesting {

        protected static final int HEADER_LENGTH = 10;
        protected static final String TAG = "ID3";
        protected static final int MAJOR_VERSION_OFFSET = 3;

        public ID3v2 createTag(byte[] buffer) throws NoSuchTagException, UnsupportedTagException, InvalidDataException {
            int majorVersion = buffer[MAJOR_VERSION_OFFSET];
            switch (majorVersion) {
                case 2:
                    return new ID3v22TagForTesting(buffer);
                case 3:
                    return new ID3v23TagForTesting(buffer);
                case 4:
                    return new ID3v24TagForTesting(buffer);
            }
            throw new UnsupportedTagException("Tag version not supported");
        }
    }
}
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.spanner.connection;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import com.google.cloud.spanner.ErrorCode;
import com.google.cloud.spanner.ResultSet;
import com.google.cloud.spanner.SpannerException;
import com.google.cloud.spanner.Statement;
import com.google.spanner.v1.CommitRequest;
import com.google.spanner.v1.ExecuteBatchDmlRequest;
import com.google.spanner.v1.ExecuteSqlRequest;
import java.util.Arrays;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/**
 * Verifies that statement tags and transaction tags set on a {@link Connection}
 * are propagated to the correct request fields on the mock Spanner server, and
 * that each tag is automatically cleared after the statement/transaction it
 * applies to.
 */
@RunWith(JUnit4.class)
public class TaggingTest extends AbstractMockServerTest {

  // Reset the recorded requests between tests so per-test counts are exact.
  @After
  public void clearRequests() {
    mockSpanner.clearRequests();
  }

  // A statement tag applies to a statement, so setting one before commit is invalid.
  @Test
  public void testStatementTagNotAllowedForCommit() {
    try (Connection connection = createConnection()) {
      connection.setStatementTag("tag-1");
      try {
        connection.commit();
        fail("missing expected exception");
      } catch (SpannerException e) {
        assertEquals(ErrorCode.FAILED_PRECONDITION, e.getErrorCode());
      }
    }
  }

  // Same restriction for rollback.
  @Test
  public void testStatementTagNotAllowedForRollback() {
    try (Connection connection = createConnection()) {
      connection.setStatementTag("tag-1");
      try {
        connection.rollback();
        fail("missing expected exception");
      } catch (SpannerException e) {
        assertEquals(ErrorCode.FAILED_PRECONDITION, e.getErrorCode());
      }
    }
  }

  // Statement tags must be set before a DML batch is started, not inside it.
  @Test
  public void testStatementTagNotAllowedInsideBatch() {
    try (Connection connection = createConnection()) {
      for (boolean autocommit : new boolean[] {true, false}) {
        connection.setAutocommit(autocommit);
        connection.startBatchDml();
        try {
          connection.setStatementTag("tag-1");
          fail("missing expected exception");
        } catch (SpannerException e) {
          assertEquals(ErrorCode.FAILED_PRECONDITION, e.getErrorCode());
        }
        connection.abortBatch();
      }
    }
  }

  // Without tags, both request and transaction tag fields must be empty.
  @Test
  public void testQuery_NoTags() {
    try (Connection connection = createConnection()) {
      for (boolean autocommit : new boolean[] {true, false}) {
        connection.setAutocommit(autocommit);
        try (ResultSet rs = connection.executeQuery(SELECT_COUNT_STATEMENT)) {}
        assertEquals(1, mockSpanner.countRequestsOfType(ExecuteSqlRequest.class));
        assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getRequestTag());
        assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getTransactionTag());
        mockSpanner.clearRequests();
      }
    }
  }

  @Test
  public void testUpdate_NoTags() {
    try (Connection connection = createConnection()) {
      for (boolean autocommit : new boolean[] {true, false}) {
        connection.setAutocommit(autocommit);
        connection.executeUpdate(INSERT_STATEMENT);
        assertEquals(1, mockSpanner.countRequestsOfType(ExecuteSqlRequest.class));
        assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getRequestTag());
        assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getTransactionTag());
        mockSpanner.clearRequests();
      }
    }
  }

  @Test
  public void testPartitionedUpdate_NoTags() {
    try (Connection connection = createConnection()) {
      connection.setAutocommit(true);
      connection.setAutocommitDmlMode(AutocommitDmlMode.PARTITIONED_NON_ATOMIC);
      connection.executeUpdate(INSERT_STATEMENT);
      assertEquals(1, mockSpanner.countRequestsOfType(ExecuteSqlRequest.class));
      assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getRequestTag());
      assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getTransactionTag());
      mockSpanner.clearRequests();
    }
  }

  @Test
  public void testBatchUpdate_NoTags() {
    try (Connection connection = createConnection()) {
      for (boolean autocommit : new boolean[] {true, false}) {
        connection.setAutocommit(autocommit);
        connection.executeBatchUpdate(Arrays.asList(INSERT_STATEMENT));
        assertEquals(1, mockSpanner.countRequestsOfType(ExecuteBatchDmlRequest.class));
        assertEquals("", mockSpanner.getRequestsOfType(ExecuteBatchDmlRequest.class).get(0).getRequestOptions().getRequestTag());
        assertEquals("", mockSpanner.getRequestsOfType(ExecuteBatchDmlRequest.class).get(0).getRequestOptions().getTransactionTag());
        mockSpanner.clearRequests();
      }
    }
  }

  // A statement tag is sent as the request tag of the next statement only.
  @Test
  public void testQuery_StatementTag() {
    try (Connection connection = createConnection()) {
      for (boolean autocommit : new boolean[] {true, false}) {
        connection.setAutocommit(autocommit);
        connection.setStatementTag("tag-1");
        try (ResultSet rs = connection.executeQuery(SELECT_COUNT_STATEMENT)) {}
        assertEquals(1, mockSpanner.countRequestsOfType(ExecuteSqlRequest.class));
        assertEquals("tag-1", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getRequestTag());
        assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getTransactionTag());
        mockSpanner.clearRequests();
        // The tag should automatically be cleared after a statement.
        try (ResultSet rs = connection.executeQuery(SELECT_COUNT_STATEMENT)) {}
        assertEquals(1, mockSpanner.countRequestsOfType(ExecuteSqlRequest.class));
        assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getRequestTag());
        assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getTransactionTag());
        mockSpanner.clearRequests();
      }
    }
  }

  @Test
  public void testUpdate_StatementTag() {
    try (Connection connection = createConnection()) {
      for (boolean autocommit : new boolean[] {true, false}) {
        connection.setAutocommit(autocommit);
        connection.setStatementTag("tag-2");
        connection.executeUpdate(INSERT_STATEMENT);
        assertEquals(1, mockSpanner.countRequestsOfType(ExecuteSqlRequest.class));
        assertEquals("tag-2", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getRequestTag());
        assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getTransactionTag());
        mockSpanner.clearRequests();
        connection.executeUpdate(INSERT_STATEMENT);
        assertEquals(1, mockSpanner.countRequestsOfType(ExecuteSqlRequest.class));
        assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getRequestTag());
        assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getTransactionTag());
        mockSpanner.clearRequests();
      }
    }
  }

  @Test
  public void testPartitionedUpdate_StatementTag() {
    try (Connection connection = createConnection()) {
      connection.setAutocommit(true);
      connection.setAutocommitDmlMode(AutocommitDmlMode.PARTITIONED_NON_ATOMIC);
      connection.setStatementTag("tag-4");
      connection.executeUpdate(INSERT_STATEMENT);
      assertEquals(1, mockSpanner.countRequestsOfType(ExecuteSqlRequest.class));
      assertEquals("tag-4", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getRequestTag());
      assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getTransactionTag());
      mockSpanner.clearRequests();
      connection.executeUpdate(INSERT_STATEMENT);
      assertEquals(1, mockSpanner.countRequestsOfType(ExecuteSqlRequest.class));
      assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getRequestTag());
      assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getTransactionTag());
      mockSpanner.clearRequests();
    }
  }

  @Test
  public void testBatchUpdate_StatementTag() {
    try (Connection connection = createConnection()) {
      for (boolean autocommit : new boolean[] {true, false}) {
        connection.setAutocommit(autocommit);
        connection.setStatementTag("tag-3");
        connection.executeBatchUpdate(Arrays.asList(INSERT_STATEMENT));
        assertEquals(1, mockSpanner.countRequestsOfType(ExecuteBatchDmlRequest.class));
        assertEquals("tag-3", mockSpanner.getRequestsOfType(ExecuteBatchDmlRequest.class).get(0).getRequestOptions().getRequestTag());
        assertEquals("", mockSpanner.getRequestsOfType(ExecuteBatchDmlRequest.class).get(0).getRequestOptions().getTransactionTag());
        mockSpanner.clearRequests();
        connection.executeBatchUpdate(Arrays.asList(INSERT_STATEMENT));
        assertEquals(1, mockSpanner.countRequestsOfType(ExecuteBatchDmlRequest.class));
        assertEquals("", mockSpanner.getRequestsOfType(ExecuteBatchDmlRequest.class).get(0).getRequestOptions().getRequestTag());
        assertEquals("", mockSpanner.getRequestsOfType(ExecuteBatchDmlRequest.class).get(0).getRequestOptions().getTransactionTag());
        mockSpanner.clearRequests();
      }
    }
  }

  // A transaction tag is attached to every statement in the transaction and to
  // the commit request, then cleared for the next transaction.
  @Test
  public void testQuery_TransactionTag() {
    try (Connection connection = createConnection()) {
      connection.setTransactionTag("tag-1");
      try (ResultSet rs = connection.executeQuery(SELECT_COUNT_STATEMENT)) {}
      connection.commit();
      assertEquals(1, mockSpanner.countRequestsOfType(ExecuteSqlRequest.class));
      assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getRequestTag());
      assertEquals("tag-1", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getTransactionTag());
      assertEquals(1, mockSpanner.countRequestsOfType(CommitRequest.class));
      assertEquals("", mockSpanner.getRequestsOfType(CommitRequest.class).get(0).getRequestOptions().getRequestTag());
      assertEquals("tag-1", mockSpanner.getRequestsOfType(CommitRequest.class).get(0).getRequestOptions().getTransactionTag());
      mockSpanner.clearRequests();
      // The tag should automatically be cleared after a statement.
      try (ResultSet rs = connection.executeQuery(SELECT_COUNT_STATEMENT)) {}
      connection.commit();
      assertEquals(1, mockSpanner.countRequestsOfType(ExecuteSqlRequest.class));
      assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getRequestTag());
      assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getTransactionTag());
      assertEquals(1, mockSpanner.countRequestsOfType(CommitRequest.class));
      assertEquals("", mockSpanner.getRequestsOfType(CommitRequest.class).get(0).getRequestOptions().getRequestTag());
      assertEquals("", mockSpanner.getRequestsOfType(CommitRequest.class).get(0).getRequestOptions().getTransactionTag());
      mockSpanner.clearRequests();
    }
  }

  @Test
  public void testUpdate_TransactionTag() {
    try (Connection connection = createConnection()) {
      connection.setTransactionTag("tag-2");
      connection.executeUpdate(INSERT_STATEMENT);
      connection.commit();
      assertEquals(1, mockSpanner.countRequestsOfType(ExecuteSqlRequest.class));
      assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getRequestTag());
      assertEquals("tag-2", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getTransactionTag());
      assertEquals(1, mockSpanner.countRequestsOfType(CommitRequest.class));
      assertEquals("", mockSpanner.getRequestsOfType(CommitRequest.class).get(0).getRequestOptions().getRequestTag());
      assertEquals("tag-2", mockSpanner.getRequestsOfType(CommitRequest.class).get(0).getRequestOptions().getTransactionTag());
      mockSpanner.clearRequests();
      connection.executeUpdate(INSERT_STATEMENT);
      connection.commit();
      assertEquals(1, mockSpanner.countRequestsOfType(ExecuteSqlRequest.class));
      assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getRequestTag());
      assertEquals("", mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).get(0).getRequestOptions().getTransactionTag());
      assertEquals(1, mockSpanner.countRequestsOfType(CommitRequest.class));
      assertEquals("", mockSpanner.getRequestsOfType(CommitRequest.class).get(0).getRequestOptions().getRequestTag());
      assertEquals("", mockSpanner.getRequestsOfType(CommitRequest.class).get(0).getRequestOptions().getTransactionTag());
      mockSpanner.clearRequests();
    }
  }

  @Test
  public void testBatchUpdate_TransactionTag() {
    try (Connection connection = createConnection()) {
      connection.setTransactionTag("tag-3");
      connection.executeBatchUpdate(Arrays.asList(INSERT_STATEMENT));
      connection.commit();
      assertEquals(1, mockSpanner.countRequestsOfType(ExecuteBatchDmlRequest.class));
      assertEquals("", mockSpanner.getRequestsOfType(ExecuteBatchDmlRequest.class).get(0).getRequestOptions().getRequestTag());
      assertEquals("tag-3", mockSpanner.getRequestsOfType(ExecuteBatchDmlRequest.class).get(0).getRequestOptions().getTransactionTag());
      assertEquals(1, mockSpanner.countRequestsOfType(CommitRequest.class));
      assertEquals("", mockSpanner.getRequestsOfType(CommitRequest.class).get(0).getRequestOptions().getRequestTag());
      assertEquals("tag-3", mockSpanner.getRequestsOfType(CommitRequest.class).get(0).getRequestOptions().getTransactionTag());
      mockSpanner.clearRequests();
      connection.executeBatchUpdate(Arrays.asList(INSERT_STATEMENT));
      connection.commit();
      assertEquals(1, mockSpanner.countRequestsOfType(ExecuteBatchDmlRequest.class));
      assertEquals("", mockSpanner.getRequestsOfType(ExecuteBatchDmlRequest.class).get(0).getRequestOptions().getRequestTag());
      assertEquals("", mockSpanner.getRequestsOfType(ExecuteBatchDmlRequest.class).get(0).getRequestOptions().getTransactionTag());
      assertEquals(1, mockSpanner.countRequestsOfType(CommitRequest.class));
      assertEquals("", mockSpanner.getRequestsOfType(CommitRequest.class).get(0).getRequestOptions().getRequestTag());
      assertEquals("", mockSpanner.getRequestsOfType(CommitRequest.class).get(0).getRequestOptions().getTransactionTag());
      mockSpanner.clearRequests();
    }
  }

  // A statement tag set before startBatchDml applies to the batched request.
  @Test
  public void testDmlBatch_StatementTag() {
    try (Connection connection = createConnection()) {
      for (boolean autocommit : new boolean[] {true, false}) {
        connection.setAutocommit(autocommit);
        connection.setStatementTag("batch-tag");
        connection.startBatchDml();
        connection.execute(INSERT_STATEMENT);
        connection.execute(INSERT_STATEMENT);
        connection.runBatch();
        assertEquals(1, mockSpanner.countRequestsOfType(ExecuteBatchDmlRequest.class));
        assertEquals("batch-tag", mockSpanner.getRequestsOfType(ExecuteBatchDmlRequest.class).get(0).getRequestOptions().getRequestTag());
        assertEquals("", mockSpanner.getRequestsOfType(ExecuteBatchDmlRequest.class).get(0).getRequestOptions().getTransactionTag());
        mockSpanner.clearRequests();
      }
    }
  }

  @Test
  public void testRunBatch_TransactionTag() {
    try (Connection connection = createConnection()) {
      connection.setTransactionTag("batch-tag");
      connection.startBatchDml();
      connection.execute(INSERT_STATEMENT);
      connection.execute(INSERT_STATEMENT);
      connection.runBatch();
      connection.commit();
      assertEquals(1, mockSpanner.countRequestsOfType(ExecuteBatchDmlRequest.class));
      assertEquals("", mockSpanner.getRequestsOfType(ExecuteBatchDmlRequest.class).get(0).getRequestOptions().getRequestTag());
      assertEquals("batch-tag", mockSpanner
.getRequestsOfType(ExecuteBatchDmlRequest.class) .get(0) .getRequestOptions() .getTransactionTag()); assertEquals(1, mockSpanner.countRequestsOfType(CommitRequest.class)); assertEquals( "", mockSpanner .getRequestsOfType(CommitRequest.class) .get(0) .getRequestOptions() .getRequestTag()); assertEquals( "batch-tag", mockSpanner .getRequestsOfType(CommitRequest.class) .get(0) .getRequestOptions() .getTransactionTag()); mockSpanner.clearRequests(); } } @Test public void testShowSetTags() { try (Connection connection = createConnection()) { connection.execute(Statement.of("SET STATEMENT_TAG='tag1'")); try (ResultSet rs = connection.execute(Statement.of("SHOW VARIABLE STATEMENT_TAG")).getResultSet()) { assertTrue(rs.next()); assertEquals("tag1", rs.getString("STATEMENT_TAG")); assertFalse(rs.next()); } connection.execute(Statement.of("SET STATEMENT_TAG=''")); try (ResultSet rs = connection.execute(Statement.of("SHOW VARIABLE STATEMENT_TAG")).getResultSet()) { assertTrue(rs.next()); assertEquals("", rs.getString("STATEMENT_TAG")); assertFalse(rs.next()); } connection.execute(Statement.of("SET TRANSACTION_TAG='tag2'")); try (ResultSet rs = connection.execute(Statement.of("SHOW VARIABLE TRANSACTION_TAG")).getResultSet()) { assertTrue(rs.next()); assertEquals("tag2", rs.getString("TRANSACTION_TAG")); assertFalse(rs.next()); } connection.execute(Statement.of("SET TRANSACTION_TAG=''")); try (ResultSet rs = connection.execute(Statement.of("SHOW VARIABLE TRANSACTION_TAG")).getResultSet()) { assertTrue(rs.next()); assertEquals("", rs.getString("TRANSACTION_TAG")); assertFalse(rs.next()); } } } }
package org.mediawiki.dumper.gui; import java.sql.SQLException; import javax.swing.JFrame; import javax.swing.JOptionPane; import java.io.IOException; import java.io.InputStream; import java.sql.Connection; import java.sql.DriverManager; import java.sql.Statement; import org.mediawiki.dumper.Tools; import org.mediawiki.importer.DumpWriter; import org.mediawiki.importer.SqlServerStream; import org.mediawiki.importer.SqlWriter; import org.mediawiki.importer.SqlWriter14; import org.mediawiki.importer.SqlWriter15; import org.mediawiki.importer.XmlDumpReader; public class DumperGui { private DumperWindow gui; // status public boolean running = false; public boolean connected = false; public boolean schemaReady = false; public static final int DBTYPE_MYSQL = 0 , DBTYPE_PGSQL = 1 ; // other goodies String host = "localhost"; String port = "3306"; String username = "root"; String password = ""; String schema = "1.5"; String dbname = "wikidb"; String prefix = ""; XmlDumpReader reader; Connection conn; private int dbtype; String driverForDatabase(int dbtype) { switch (dbtype) { case DBTYPE_MYSQL: return "com.mysql.jdbc.Driver"; case DBTYPE_PGSQL: return "org.postgresql.Driver"; default: return null; } } String urlForDatabase(int dbtype, String host, String port, String username, String password) { switch (dbtype) { case DBTYPE_MYSQL: return "jdbc:mysql://" + host + ":" + port + "/" + // dbname + "?user=" + username + "&password=" + password + "&useUnicode=true" + "&characterEncoding=UTF-8" + "&jdbcCompliantTruncation=false"; case DBTYPE_PGSQL: return "jdbc:postgresql://" + host + ":" + port + "/" + "?user=" + username + "&password=" + password; default: return null; } } void connect(int dbtype, String host, String port, String username, String password) { assert !connected; assert conn == null; assert !running; assert !schemaReady; try { Class.forName(driverForDatabase(dbtype)).newInstance(); } catch (ClassNotFoundException ex) { ex.printStackTrace(); } catch 
(InstantiationException ex) { ex.printStackTrace(); } catch (IllegalAccessException ex) { ex.printStackTrace(); } gui.setDatabaseStatus("Connecting..."); try { // fixme is there escaping? is this a url? fucking java bullshit String url = urlForDatabase(dbtype, host, port, username, password); System.err.println("Connecting to " + url); conn = DriverManager.getConnection(url); connected = true; this.dbtype = dbtype; gui.setDatabaseStatus("Connected."); gui.showFields(); checkSchema(); } catch (SQLException ex) { JOptionPane.showMessageDialog(gui, "Failed to connect to database: " + ex.getMessage(), "Database Connection Error", JOptionPane.ERROR_MESSAGE); gui.setDatabaseStatus("Failed to connect."); ex.printStackTrace(); } assert (connected == (conn != null)); } void disconnect() { assert connected; assert conn != null; assert !running; try { conn.close(); conn = null; connected = false; gui.setDatabaseStatus("Disconnected."); gui.showFields(); checkSchema(); } catch (SQLException ex) { ex.printStackTrace(); } assert !connected; assert conn == null; } void setDbname(String dbname) { this.dbname = dbname; checkSchema(); } void setPrefix(String prefix) { this.prefix = prefix; checkSchema(); } void setSchema(String schema) { this.schema = schema; checkSchema(); } void checkSchema() { schemaReady = false; if (connected) { gui.setSchemaStatus("Checking..."); try { conn.setCatalog(dbname); String[] tables = testTables(); for (int i = 0; i < tables.length; i++) { Statement sql = conn.createStatement(); sql.execute("SELECT 1 FROM " + tables[i] + " LIMIT 0"); } schemaReady = true; gui.setSchemaStatus("Ready"); } catch (SQLException e) { gui.setSchemaStatus("Error: " + e.getMessage()); } } else { gui.setSchemaStatus("Not connected."); } gui.showFields(); assert !(schemaReady && !connected) : "Schema can't be ready if disconnected."; } String[] testTables() { if (schema.equals("1.4")) return new String[] { prefix + "cur", prefix + "old"}; else return new String[] { prefix + 
"page", prefix + "revision", prefix + (this.dbtype == DBTYPE_MYSQL ? "text" : "pagecontent")}; } void startImport(String inputFile) throws IOException, SQLException { assert connected; assert conn != null; assert schemaReady; assert !running; // work right ;) final InputStream stream = Tools.openInputFile(inputFile); //DumpWriter writer = new MultiWriter(); conn.setCatalog(dbname); DumpWriter writer = openWriter(); DumpWriter progress = gui.getProgressWriter(writer, 1000); reader = new XmlDumpReader(stream, progress); new Thread() { public void run() { running = true; gui.showFields(); gui.setProgress("Starting import..."); try { reader.readDump(); stream.close(); } catch(IOException e) { e.printStackTrace(); gui.setProgress("FAILED: " + e.getMessage()); } running = false; reader = null; gui.showFields(); } }.start(); } private SqlWriter.Traits getTraits() { switch (dbtype) { case DBTYPE_MYSQL: return new SqlWriter.MySQLTraits(); case DBTYPE_PGSQL: return new SqlWriter.PostgresTraits(); default: return null; } } DumpWriter openWriter() { SqlServerStream sqlStream = new SqlServerStream(conn); /* XXX should have mysql/postgres selection */ if (schema.equals("1.4")) return new SqlWriter14(getTraits(), sqlStream, prefix); else return new SqlWriter15(getTraits(), sqlStream, prefix); } void abort() { // Request an abort! gui.setProgress("Aborting import..."); reader.abort(); } /** * @param args */ public static void main(String[] args) { // Set up some prettification if we're on Mac OS X System.setProperty("apple.laf.useScreenMenuBar", "true"); System.setProperty("com.apple.mrj.application.apple.menu.about.name", "MediaWiki Import"); DumperGui manager = new DumperGui(); } public DumperGui() { gui = new DumperWindow(this); gui.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); gui.setVisible(true); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.core.server.cluster; import java.io.PrintWriter; import java.io.StringWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.ScheduledExecutorService; import org.apache.activemq.artemis.api.core.ActiveMQException; import org.apache.activemq.artemis.api.core.ActiveMQExceptionType; import org.apache.activemq.artemis.api.core.BroadcastGroupConfiguration; import org.apache.activemq.artemis.api.core.DiscoveryGroupConfiguration; import org.apache.activemq.artemis.api.core.Interceptor; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.api.core.TransportConfiguration; import org.apache.activemq.artemis.api.core.client.ActiveMQClient; import org.apache.activemq.artemis.core.client.impl.ServerLocatorInternal; import org.apache.activemq.artemis.core.config.BridgeConfiguration; import org.apache.activemq.artemis.core.config.ClusterConnectionConfiguration; import org.apache.activemq.artemis.core.config.Configuration; import 
org.apache.activemq.artemis.core.filter.impl.FilterImpl; import org.apache.activemq.artemis.core.postoffice.Binding; import org.apache.activemq.artemis.core.postoffice.PostOffice; import org.apache.activemq.artemis.core.protocol.core.Channel; import org.apache.activemq.artemis.core.protocol.core.CoreRemotingConnection; import org.apache.activemq.artemis.core.protocol.core.Packet; import org.apache.activemq.artemis.core.protocol.core.impl.PacketImpl; import org.apache.activemq.artemis.core.protocol.core.impl.wireformat.ActiveMQExceptionMessage; import org.apache.activemq.artemis.core.server.ActiveMQComponent; import org.apache.activemq.artemis.core.server.ActiveMQServer; import org.apache.activemq.artemis.core.server.ActiveMQServerLogger; import org.apache.activemq.artemis.core.server.NodeManager; import org.apache.activemq.artemis.core.server.Queue; import org.apache.activemq.artemis.core.server.cluster.ha.HAManager; import org.apache.activemq.artemis.core.server.cluster.impl.BridgeImpl; import org.apache.activemq.artemis.core.server.cluster.impl.BroadcastGroupImpl; import org.apache.activemq.artemis.core.server.cluster.impl.ClusterConnectionImpl; import org.apache.activemq.artemis.core.server.cluster.qourum.QuorumManager; import org.apache.activemq.artemis.core.server.impl.Activation; import org.apache.activemq.artemis.core.server.management.ManagementService; import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection; import org.apache.activemq.artemis.spi.core.remoting.Acceptor; import org.apache.activemq.artemis.utils.ConcurrentHashSet; import org.apache.activemq.artemis.utils.ExecutorFactory; import org.apache.activemq.artemis.utils.FutureLatch; /** * A ClusterManager manages {@link ClusterConnection}s, {@link BroadcastGroup}s and {@link Bridge}s. * <p> * Note that {@link org.apache.activemq.artemis.core.server.cluster.impl.ClusterConnectionBridge}s extend Bridges but are controlled over through * {@link ClusterConnectionImpl}. 
As a node is discovered a new {@link org.apache.activemq.artemis.core.server.cluster.impl.ClusterConnectionBridge} is * deployed. */ public final class ClusterManager implements ActiveMQComponent { private ClusterController clusterController; private HAManager haManager; private final Map<String, BroadcastGroup> broadcastGroups = new HashMap(); private final Map<String, Bridge> bridges = new HashMap(); private final ExecutorFactory executorFactory; private final ActiveMQServer server; private final PostOffice postOffice; private final ScheduledExecutorService scheduledExecutor; private ClusterConnection defaultClusterConnection; private final ManagementService managementService; private final Configuration configuration; public QuorumManager getQuorumManager() { return clusterController.getQuorumManager(); } public ClusterController getClusterController() { return clusterController; } public HAManager getHAManager() { return haManager; } public void addClusterChannelHandler(Channel channel, Acceptor acceptorUsed, CoreRemotingConnection remotingConnection, Activation activation) { clusterController.addClusterChannelHandler(channel, acceptorUsed, remotingConnection, activation); } enum State { STOPPED, /** * Used because {@link ClusterManager#stop()} method is not completely synchronized */ STOPPING, /** * Deployed means {@link ClusterManager#deploy()} was called but * {@link ClusterManager#start()} was not called. * <p> * We need the distinction if {@link ClusterManager#stop()} is called before 'start'. As * otherwise we would leak locators. 
*/ DEPLOYED, STARTED, } private volatile State state = State.STOPPED; // the cluster connections which links this node to other cluster nodes private final Map<String, ClusterConnection> clusterConnections = new HashMap<>(); private final Set<ServerLocatorInternal> clusterLocators = new ConcurrentHashSet<>(); private final Executor executor; private final NodeManager nodeManager; public ClusterManager(final ExecutorFactory executorFactory, final ActiveMQServer server, final PostOffice postOffice, final ScheduledExecutorService scheduledExecutor, final ManagementService managementService, final Configuration configuration, final NodeManager nodeManager, final boolean backup) { this.executorFactory = executorFactory; executor = executorFactory.getExecutor(); this.server = server; this.postOffice = postOffice; this.scheduledExecutor = scheduledExecutor; this.managementService = managementService; this.configuration = configuration; this.nodeManager = nodeManager; clusterController = new ClusterController(server, scheduledExecutor); haManager = server.getActivation().getHAManager(); } public String describe() { StringWriter str = new StringWriter(); PrintWriter out = new PrintWriter(str); out.println("Information on " + this); out.println("*******************************************************"); for (ClusterConnection conn : cloneClusterConnections()) { out.println(conn.describe()); } out.println("*******************************************************"); return str.toString(); } /** * Return the default ClusterConnection to be used case it's not defined by the acceptor * * @return default connection */ public ClusterConnection getDefaultConnection(TransportConfiguration acceptorConfig) { if (acceptorConfig == null) { // if the parameter is null, we just return whatever is defined on defaultClusterConnection return defaultClusterConnection; } else if (defaultClusterConnection != null && defaultClusterConnection.getConnector().isEquivalent(acceptorConfig)) { return 
defaultClusterConnection; } else { for (ClusterConnection conn : cloneClusterConnections()) { if (conn.getConnector().isEquivalent(acceptorConfig)) { return conn; } } return null; } } @Override public String toString() { return "ClusterManagerImpl[server=" + server + "]@" + System.identityHashCode(this); } public String getNodeId() { return nodeManager.getNodeId().toString(); } public String getBackupGroupName() { return server.getHAPolicy().getBackupGroupName(); } public String getScaleDownGroupName() { return server.getHAPolicy().getScaleDownGroupName(); } public synchronized void deploy() throws Exception { if (state == State.STOPPED) { state = State.DEPLOYED; } else { throw new IllegalStateException(); } for (BroadcastGroupConfiguration config : configuration.getBroadcastGroupConfigurations()) { deployBroadcastGroup(config); } for (ClusterConnectionConfiguration config : configuration.getClusterConfigurations()) { deployClusterConnection(config); } /* * only start if we are actually in a cluster * */ if (clusterConnections.size() > 0) { clusterController.start(); } } @Override public synchronized void start() throws Exception { if (state == State.STARTED) { return; } for (BroadcastGroup group : broadcastGroups.values()) { try { group.start(); } catch (Exception e) { ActiveMQServerLogger.LOGGER.unableToStartBroadcastGroup(e, group.getName()); } } for (ClusterConnection conn : clusterConnections.values()) { try { conn.start(); } catch (Exception e) { ActiveMQServerLogger.LOGGER.unableToStartClusterConnection(e, conn.getName()); } } deployConfiguredBridges(); for (Bridge bridge : bridges.values()) { try { bridge.start(); } catch (Exception e) { ActiveMQServerLogger.LOGGER.unableToStartBridge(e, bridge.getName()); } } //now start the ha manager haManager.start(); state = State.STARTED; } private void deployConfiguredBridges() throws Exception { for (BridgeConfiguration config : configuration.getBridgeConfigurations()) { deployBridge(config); } } @Override public 
void stop() throws Exception {
   // Stop the HA manager before taking the monitor; the shutdown inside the
   // synchronized block is guarded by the STOPPED/STOPPING state check.
   haManager.stop();
   synchronized (this) {
      if (state == State.STOPPED || state == State.STOPPING) {
         return;
      }
      state = State.STOPPING;

      clusterController.stop();

      for (BroadcastGroup group : broadcastGroups.values()) {
         group.stop();
         managementService.unregisterBroadcastGroup(group.getName());
      }

      broadcastGroups.clear();

      for (ClusterConnection clusterConnection : clusterConnections.values()) {
         clusterConnection.stop();
         managementService.unregisterCluster(clusterConnection.getName().toString());
      }

      for (Bridge bridge : bridges.values()) {
         bridge.stop();
         managementService.unregisterBridge(bridge.getName().toString());
      }

      bridges.clear();
   }

   // Locators are closed outside the monitor; a close failure is logged and does
   // not abort the shutdown of the remaining locators.
   for (ServerLocatorInternal clusterLocator : clusterLocators) {
      try {
         clusterLocator.close();
      } catch (Exception e) {
         ActiveMQServerLogger.LOGGER.errorClosingServerLocator(e, clusterLocator);
      }
   }
   clusterLocators.clear();
   state = State.STOPPED;

   clearClusterConnections();
}

/**
 * Blocks until previously submitted work on this manager's executor has run
 * (bounded by a 10s wait); dumps server threads if the flush times out.
 */
public void flushExecutor() {
   FutureLatch future = new FutureLatch();
   executor.execute(future);
   if (!future.await(10000)) {
      ActiveMQServerLogger.LOGGER.couldNotFlushClusterManager(this.toString());
      server.threadDump();
   }
}

@Override
public boolean isStarted() {
   return state == State.STARTED;
}

// The getters below return defensive copies so callers cannot mutate internal state.
public Map<String, Bridge> getBridges() {
   return new HashMap<>(bridges);
}

public Set<ClusterConnection> getClusterConnections() {
   return new HashSet<>(clusterConnections.values());
}

public Set<BroadcastGroup> getBroadcastGroups() {
   return new HashSet<>(broadcastGroups.values());
}

public ClusterConnection getClusterConnection(final String name) {
   return clusterConnections.get(name);
}

public void removeClusterLocator(final ServerLocatorInternal serverLocator) {
   this.clusterLocators.remove(serverLocator);
}

/**
 * Deploys (and starts) a bridge from the given configuration. Invalid or duplicate
 * configurations are logged and silently skipped rather than raising an error.
 */
public synchronized void deployBridge(final BridgeConfiguration config) throws Exception {
   if (config.getName() == null) {
      ActiveMQServerLogger.LOGGER.bridgeNotUnique();

      return;
   }

   if (config.getQueueName() == null) {
      ActiveMQServerLogger.LOGGER.bridgeNoQueue(config.getName());

      return;
   }

   // NOTE(review): unlike the checks above, a missing forwarding address only logs
   // and does NOT return — confirm this fall-through is intentional.
   if (config.getForwardingAddress() == null) {
      ActiveMQServerLogger.LOGGER.bridgeNoForwardAddress(config.getName());
   }

   if (bridges.containsKey(config.getName())) {
      ActiveMQServerLogger.LOGGER.bridgeAlreadyDeployed(config.getName());

      return;
   }

   Transformer transformer = server.getServiceRegistry().getBridgeTransformer(config.getName(), config.getTransformerClassName());

   Binding binding = postOffice.getBinding(new SimpleString(config.getQueueName()));

   if (binding == null) {
      ActiveMQServerLogger.LOGGER.bridgeQueueNotFound(config.getQueueName(), config.getName());

      return;
   }

   Queue queue = (Queue) binding.getBindable();

   ServerLocatorInternal serverLocator;

   // The locator is built either from a discovery group or from a static connector list.
   if (config.getDiscoveryGroupName() != null) {
      DiscoveryGroupConfiguration discoveryGroupConfiguration = configuration.getDiscoveryGroupConfigurations().get(config.getDiscoveryGroupName());
      if (discoveryGroupConfiguration == null) {
         ActiveMQServerLogger.LOGGER.bridgeNoDiscoveryGroup(config.getDiscoveryGroupName());

         return;
      }

      if (config.isHA()) {
         serverLocator = (ServerLocatorInternal) ActiveMQClient.createServerLocatorWithHA(discoveryGroupConfiguration);
      } else {
         serverLocator = (ServerLocatorInternal) ActiveMQClient.createServerLocatorWithoutHA(discoveryGroupConfiguration);
      }
   } else {
      TransportConfiguration[] tcConfigs = configuration.getTransportConfigurations(config.getStaticConnectors());

      if (tcConfigs == null) {
         ActiveMQServerLogger.LOGGER.bridgeCantFindConnectors(config.getName());
         return;
      }

      if (config.isHA()) {
         serverLocator = (ServerLocatorInternal) ActiveMQClient.createServerLocatorWithHA(tcConfigs);
      } else {
         serverLocator = (ServerLocatorInternal) ActiveMQClient.createServerLocatorWithoutHA(tcConfigs);
      }
   }

   serverLocator.setIdentity("Bridge " + config.getName());
   serverLocator.setConfirmationWindowSize(config.getConfirmationWindowSize());

   // We are going to manually retry on the bridge in case of failure
   serverLocator.setReconnectAttempts(0);
   serverLocator.setInitialConnectAttempts(0);
   serverLocator.setRetryInterval(config.getRetryInterval());
   serverLocator.setMaxRetryInterval(config.getMaxRetryInterval());
   serverLocator.setRetryIntervalMultiplier(config.getRetryIntervalMultiplier());
   serverLocator.setClientFailureCheckPeriod(config.getClientFailureCheckPeriod());
   serverLocator.setConnectionTTL(config.getConnectionTTL());
   // With duplicate detection on, sends need not block: redelivered messages are filtered out.
   serverLocator.setBlockOnDurableSend(!config.isUseDuplicateDetection());
   serverLocator.setBlockOnNonDurableSend(!config.isUseDuplicateDetection());
   serverLocator.setMinLargeMessageSize(config.getMinLargeMessageSize());
   //disable flow control
   serverLocator.setProducerWindowSize(-1);

   // This will be set to 30s unless it's changed from embedded / testing
   // there is no reason to exception the config for this timeout
   // since the Bridge is supposed to be non-blocking and fast
   // We may expose this if we find a good use case
   serverLocator.setCallTimeout(config.getCallTimeout());

   serverLocator.addIncomingInterceptor(new IncomingInterceptorLookingForExceptionMessage(this, executor));
   if (!config.isUseDuplicateDetection()) {
      ActiveMQServerLogger.LOGGER.debug("Bridge " + config.getName() +
                                           " is configured to not use duplicate detecion, it will send messages synchronously");
   }

   clusterLocators.add(serverLocator);

   Bridge bridge = new BridgeImpl(serverLocator, config.getInitialConnectAttempts(), config.getReconnectAttempts(), config.getReconnectAttemptsOnSameNode(), config.getRetryInterval(), config.getRetryIntervalMultiplier(), config.getMaxRetryInterval(), nodeManager.getUUID(), new SimpleString(config.getName()), queue, executorFactory.getExecutor(), FilterImpl.createFilter(config.getFilterString()), SimpleString.toSimpleString(config.getForwardingAddress()), scheduledExecutor, transformer, config.isUseDuplicateDetection(), config.getUser(), config.getPassword(), server.getStorageManager());

   bridges.put(config.getName(), bridge);

   managementService.registerBridge(bridge, config);

   bridge.start();
}
/**
 * Interceptor installed on bridge/cluster connections that watches incoming packets for
 * exception messages. When the remote node reports a cluster-security failure, the owning
 * {@link ClusterManager} is stopped asynchronously on the supplied executor (stopping inline
 * would block the remoting thread that delivered the packet).
 */
public static class IncomingInterceptorLookingForExceptionMessage implements Interceptor {

   private final ClusterManager manager;

   private final Executor executor;

   /**
    * @param manager the cluster manager to shut down when a cluster security error arrives
    * @param executor executor used to run the shutdown off the remoting thread
    */
   public IncomingInterceptorLookingForExceptionMessage(ClusterManager manager, Executor executor) {
      this.manager = manager;
      this.executor = executor;
   }

   @Override
   public boolean intercept(Packet packet, RemotingConnection connection) throws ActiveMQException {
      if (packet.getType() == PacketImpl.EXCEPTION) {
         ActiveMQExceptionMessage msg = (ActiveMQExceptionMessage) packet;
         final ActiveMQException exception = msg.getException();
         if (exception.getType() == ActiveMQExceptionType.CLUSTER_SECURITY_EXCEPTION) {
            ActiveMQServerLogger.LOGGER.clusterManagerAuthenticationError(exception.getMessage());
            executor.execute(new Runnable() {
               @Override
               public void run() {
                  try {
                     manager.stop();
                  } catch (Exception e) {
                     // Fixed: was e.printStackTrace() — route failures through the server
                     // logger, consistent with the rest of this class.
                     ActiveMQServerLogger.LOGGER.warn(e.getMessage(), e);
                  }
               }
            });
         }
      }
      // Always let the packet continue to the regular packet handlers.
      return true;
   }
}

/**
 * Stops and unregisters the named bridge, then flushes its executor outside the lock so
 * in-flight tasks complete without holding the ClusterManager monitor.
 *
 * @param name the bridge name; silently ignored if no such bridge is deployed
 */
public void destroyBridge(final String name) throws Exception {
   Bridge bridge;

   synchronized (this) {
      bridge = bridges.remove(name);
      if (bridge != null) {
         bridge.stop();
         managementService.unregisterBridge(name);
      }
   }
   if (bridge != null) {
      bridge.flushExecutor();
   }
}

// for testing
public void clear() {
   for (Bridge bridge : bridges.values()) {
      try {
         bridge.stop();
      } catch (Exception e) {
         ActiveMQServerLogger.LOGGER.warn(e.getMessage(), e);
      }
   }
   bridges.clear();
   for (ClusterConnection clusterConnection : clusterConnections.values()) {
      try {
         clusterConnection.stop();
      } catch (Exception e) {
         // Fixed: was e.printStackTrace() — use the server logger, matching the
         // bridge-stop handling just above.
         ActiveMQServerLogger.LOGGER.warn(e.getMessage(), e);
      }
   }
   clearClusterConnections();
}

/**
 * Notifies the named cluster connection that a backup has come online; no-op when the
 * cluster connection does not exist.
 */
public void informClusterOfBackup(String name) {
   ClusterConnection clusterConnection = clusterConnections.get(name);

   if (clusterConnection != null) {
      clusterConnection.informClusterOfBackup();
   }
}

// Private methods ----------------------------------------------------------------------------------------------------

// Drops all cluster connection bookkeeping, including the default-connection reference.
private void clearClusterConnections() {
   clusterConnections.clear();
   this.defaultClusterConnection = null;
}

/**
 * Deploys one cluster connection from configuration. Skips invalid configs, missing
 * connectors and duplicate names; builds either a discovery-group-based or a static
 * connector-list-based {@link ClusterConnectionImpl}. The first connection deployed
 * becomes the default one.
 */
private void deployClusterConnection(final ClusterConnectionConfiguration config) throws Exception {

   if (!config.validateConfiguration()) {
      return;
   }

   TransportConfiguration connector = config.getTransportConfiguration(configuration);

   if (connector == null) {
      return;
   }

   if (clusterConnections.containsKey(config.getName())) {
      // NOTE(review): this logs the connector name rather than the cluster connection
      // name — confirm against the logger message before changing.
      ActiveMQServerLogger.LOGGER.clusterConnectionAlreadyExists(config.getConnectorName());
      return;
   }

   ClusterConnectionImpl clusterConnection;

   if (config.getDiscoveryGroupName() != null) {
      DiscoveryGroupConfiguration dg = config.getDiscoveryGroupConfiguration(configuration);

      if (dg == null)
         return;

      if (ActiveMQServerLogger.LOGGER.isDebugEnabled()) {
         ActiveMQServerLogger.LOGGER.debug(this + " Starting a Discovery Group Cluster Connection, name=" + config.getDiscoveryGroupName() + ", dg=" + dg);
      }

      clusterConnection = new ClusterConnectionImpl(this, dg, connector, new SimpleString(config.getName()), new SimpleString(config.getAddress()), config.getMinLargeMessageSize(), config.getClientFailureCheckPeriod(), config.getConnectionTTL(), config.getRetryInterval(), config.getRetryIntervalMultiplier(), config.getMaxRetryInterval(), config.getInitialConnectAttempts(), config.getReconnectAttempts(), config.getCallTimeout(), config.getCallFailoverTimeout(), config.isDuplicateDetection(), config.getMessageLoadBalancingType(), config.getConfirmationWindowSize(), executorFactory, server, postOffice, managementService, scheduledExecutor, config.getMaxHops(), nodeManager, server.getConfiguration().getClusterUser(), server.getConfiguration().getClusterPassword(), config.isAllowDirectConnectionsOnly(), config.getClusterNotificationInterval(), config.getClusterNotificationAttempts());

      clusterController.addClusterConnection(clusterConnection.getName(), dg, config);
   } else {
      TransportConfiguration[] tcConfigs = config.getTransportConfigurations(configuration);

      if (ActiveMQServerLogger.LOGGER.isDebugEnabled()) {
         ActiveMQServerLogger.LOGGER.debug(this + " defining cluster connection towards " + Arrays.toString(tcConfigs));
      }

      clusterConnection = new ClusterConnectionImpl(this, tcConfigs, connector, new SimpleString(config.getName()), new SimpleString(config.getAddress()), config.getMinLargeMessageSize(), config.getClientFailureCheckPeriod(), config.getConnectionTTL(), config.getRetryInterval(), config.getRetryIntervalMultiplier(), config.getMaxRetryInterval(), config.getInitialConnectAttempts(), config.getReconnectAttempts(), config.getCallTimeout(), config.getCallFailoverTimeout(), config.isDuplicateDetection(), config.getMessageLoadBalancingType(), config.getConfirmationWindowSize(), executorFactory, server, postOffice, managementService, scheduledExecutor, config.getMaxHops(), nodeManager, server.getConfiguration().getClusterUser(), server.getConfiguration().getClusterPassword(), config.isAllowDirectConnectionsOnly(), config.getClusterNotificationInterval(), config.getClusterNotificationAttempts());

      clusterController.addClusterConnection(clusterConnection.getName(), tcConfigs, config);
   }

   // The first cluster connection deployed becomes the default one.
   if (defaultClusterConnection == null) {
      defaultClusterConnection = clusterConnection;
      clusterController.setDefaultClusterConnectionName(defaultClusterConnection.getName());
   }

   managementService.registerCluster(clusterConnection, config);

   clusterConnections.put(config.getName(), clusterConnection);

   if (ActiveMQServerLogger.LOGGER.isTraceEnabled()) {
      ActiveMQServerLogger.LOGGER.trace("ClusterConnection.start at " + clusterConnection, new Exception("trace"));
   }
}

/**
 * Creates and registers a broadcast group for the given configuration, skipping
 * duplicates by name.
 */
private synchronized void deployBroadcastGroup(final BroadcastGroupConfiguration config) throws Exception {
   if (broadcastGroups.containsKey(config.getName())) {
      ActiveMQServerLogger.LOGGER.broadcastGroupAlreadyExists(config.getName());
      return;
   }

   // NOTE(review): createBroadcastGroup can return null (missing connector / empty
   // group); registerBroadcastGroup is then called with null — verify that is intended.
   BroadcastGroup group = createBroadcastGroup(config);

   managementService.registerBroadcastGroup(group, config);
}

/**
 * Looks up or builds the broadcast group for this config. Returns null (after logging)
 * when a referenced connector is unknown or the resulting group has no connectors;
 * otherwise caches the group in {@code broadcastGroups} and returns it.
 */
private BroadcastGroup createBroadcastGroup(BroadcastGroupConfiguration config) throws Exception {
   BroadcastGroup group = broadcastGroups.get(config.getName());

   if (group == null) {
      group = new BroadcastGroupImpl(nodeManager, config.getName(), config.getBroadcastPeriod(), scheduledExecutor, config.getEndpointFactory());

      for (String connectorInfo : config.getConnectorInfos()) {
         TransportConfiguration connector = configuration.getConnectorConfigurations().get(connectorInfo);

         if (connector == null) {
            logWarnNoConnector(connectorInfo, config.getName());
            return null;
         }

         group.addConnector(connector);
      }
   }

   if (group.size() == 0) {
      logWarnNoConnector(config.getConnectorInfos().toString(), group.getName());
      return null;
   }

   broadcastGroups.put(config.getName(), group);

   return group;
}

// Shared warning path for a broadcast group referencing an unknown/empty connector set.
private void logWarnNoConnector(final String connectorName, final String bgName) {
   ActiveMQServerLogger.LOGGER.broadcastGroupNoConnector(connectorName, bgName);
}

// Snapshot copy so callers can iterate without holding this monitor.
private synchronized Collection<ClusterConnection> cloneClusterConnections() {
   ArrayList<ClusterConnection> list = new ArrayList<>(clusterConnections.values());
   return list;
}
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package Util;

import com.sun.org.apache.xpath.internal.operations.Variable;
import java.text.DecimalFormat;
import java.util.ArrayList;

/**
 * ID3-style decision tree builder over a string matrix. The last column of
 * {@code matrizDatos} is the class label; remaining columns are attributes. The
 * attribute with minimum weighted entropy (i.e. maximum information gain) is
 * selected at each node, and the run is traced into the {@code proceso} string.
 *
 * @author pichon
 */
public class ArbolDecision {

    // Training data: one row per observation; the LAST column is the class label.
    ArrayList<ArrayList<String>> matrizDatos = new ArrayList<>();
    // Attribute (column) names, parallel to the attribute columns of matrizDatos.
    ArrayList<String> nombreAtributos = new ArrayList<>();
    int cantAtributos = 4;
    int cantClases = 2;
    // Human-readable trace of the last algorithm run.
    String proceso;
    // Root of the decision tree built by runArbolDecision().
    NodoArbolDecision estructuraArbol;

    // Getters and setters
    public String getProceso() {
        return proceso;
    }

    public void setProceso(String proceso) {
        this.proceso = proceso;
    }

    public ArrayList<ArrayList<String>> getMatrizDatos() {
        return matrizDatos;
    }

    public void setMatrizDatos(ArrayList<ArrayList<String>> matrizDatos) {
        this.matrizDatos = matrizDatos;
    }

    public ArrayList<String> getNombreAtributos() {
        return nombreAtributos;
    }

    public void setNombreAtributos(ArrayList<String> nombreAtributos) {
        this.nombreAtributos = nombreAtributos;
    }

    public int getCantAtributos() {
        return cantAtributos;
    }

    public void setCantAtributos(int cantAtributos) {
        this.cantAtributos = cantAtributos;
    }

    public int getCantClases() {
        return cantClases;
    }

    public void setCantClases(int cantClases) {
        this.cantClases = cantClases;
    }

    public NodoArbolDecision getEstructuraArbol() {
        return estructuraArbol;
    }

    public void setEstructuraArbol(NodoArbolDecision estructuraArbol) {
        this.estructuraArbol = estructuraArbol;
    }

    // Class methods

    /**
     * Entry point: builds the tree from {@code matrizDatos}, storing the trace in
     * {@code proceso} and the root node in {@code estructuraArbol}.
     * NOTE(review): the data matrix itself is passed in (not a deep copy) and
     * subdividirMariz removes columns from its rows, so a run mutates matrizDatos.
     */
    public void runArbolDecision() {
        // Initialize: the class column is the last one.
        int posClase = matrizDatos.get(0).size()-1;
        ArrayList<String> tiposClases = getClases(matrizDatos, posClase);
        ClaseAuxiliar salida = correrAlgoritmo(matrizDatos, cantClases, new ArrayList<>(nombreAtributos), tiposClases);
        proceso = "Comienzo algoritmo: \n\n" + salida.getProceso();
        estructuraArbol = salida.getEstructuraArbol();
    }

    /**
     * Shannon entropy of the class distribution of {@code matrizAux}, using the
     * number of classes as the logarithm base (so the maximum value is 1).
     */
    private double getEntropia(ArrayList<ArrayList<String>> matrizAux, ArrayList<String> tiposClases) {
        double salida = 0;
        double total = matrizAux.size();
        double[] observaciones = getObservaciones(matrizAux, tiposClases);
        for (int i = 0; i < tiposClases.size(); i++) {
            // logBase already returns -log_b(p) (and 0 for p == 0), so this sums p * -log_b(p).
            salida = salida + ((observaciones[i]/total)*logBase(tiposClases.size(), (observaciones[i]/total)));
        }
        return salida;
    }

    // Returns -log_base(numero), with the 0-probability case defined as 0.
    private double logBase(double base, double numero){
        if (numero == 0) {
            return 0;
        }
        return -(Math.log(numero) / Math.log(base));
    }

    // Distinct values appearing in the given column, in first-seen order.
    private ArrayList<String> getClases(ArrayList<ArrayList<String>> matrizDatos, int columna) {
        ArrayList<String> salida = new ArrayList<>();
        salida.add(matrizDatos.get(0).get(columna));
        for (int j = 1; j < matrizDatos.size(); j++) {
            if (!salida.contains(matrizDatos.get(j).get(columna))) {
                salida.add(matrizDatos.get(j).get(columna));
            }
        }
        return salida;
    }

    // Count how many times each class label occurs in the (last) class column.
    private double[] getObservaciones(ArrayList<ArrayList<String>> matrizAux, ArrayList<String> tiposClases) {
        double[] observaciones = new double[tiposClases.size()];
        int posClase = matrizAux.get(0).size()-1;
        for (int i = 0; i < tiposClases.size(); i++) {
            observaciones[i] = 0;
            for (int j = 0; j < matrizAux.size(); j++) {
                if (matrizAux.get(j).get(posClase).equals(tiposClases.get(i))) {
                    observaciones[i] = observaciones[i] + 1;
                }
            }
        }
        return observaciones;
    }

    /**
     * Entropy of each value ("variable") of the attribute in column {@code columna},
     * appending the per-value formulas to the trace.
     */
    private ClaseAuxiliar getEntropiaVariables(int columna, ArrayList<ArrayList<String>> matrizAux, ArrayList<String> tiposClases, String nombreAtributo, ArrayList<String> variablesAtributo) {
        ClaseAuxiliar salida = new ClaseAuxiliar();
        ArrayList<Double> entropiaAtributos = new ArrayList<>();
        String proceso = "";
        double entropiaAux, total, aux;
        // Per-value, per-class observation counts (last column of each row = value total).
        double[][] cantObserv = getObservAtributos(matrizAux, tiposClases, columna, variablesAtributo);
        for (int i = 0; i < variablesAtributo.size(); i++) {
            entropiaAux = 0;
            total = cantObserv[i][tiposClases.size()];
            String stringAux = "";
            for (int j = 0; j < tiposClases.size(); j++) {
                aux = cantObserv[i][j]/total;
                stringAux += "+("+cantObserv[i][j]+"/"+total+")log"+tiposClases.size()+"("+cantObserv[i][j]+"/"+total+")";
                // Fixed: the original reset entropiaAux to 0 when aux was 0 or 1,
                // wiping out terms already accumulated for earlier classes. logBase
                // already yields 0 for p == 0 and log(1) == 0, so those terms simply
                // contribute nothing — no special case is needed.
                entropiaAux = entropiaAux + (aux*logBase(tiposClases.size(), aux));
            }
            stringAux = stringAux.substring(1);
            entropiaAtributos.add(entropiaAux);
            proceso += "Entropia[" + nombreAtributo + "-" + variablesAtributo.get(i)+"]: "+ stringAux +"= " + entropiaAux + "\n";
        }
        salida.setProceso(proceso);
        salida.setValores(entropiaAtributos);
        return salida;
    }

    /**
     * For each value of the attribute in {@code columna}: counts per class, plus the
     * value's total observation count in the extra last column of the result.
     */
    private double[][] getObservAtributos(ArrayList<ArrayList<String>> matrizAux, ArrayList<String> tiposClases, int columna, ArrayList<String> claseAtributo) {
        double[][] observaciones = new double[claseAtributo.size()][tiposClases.size()+1];
        int posClase = matrizAux.get(0).size()-1;
        int contador;
        for (int k = 0; k < claseAtributo.size(); k++) {
            for (int i = 0; i < tiposClases.size(); i++) {
                observaciones[k][i] = 0;
                contador = 0;
                for (int j = 0; j < matrizAux.size(); j++) {
                    if (matrizAux.get(j).get(columna).equals(claseAtributo.get(k))) {
                        contador = contador +1;
                        if (matrizAux.get(j).get(posClase).equals(tiposClases.get(i))) {
                            observaciones[k][i] = observaciones[k][i] + 1;
                        }
                    }
                }
                observaciones[k][tiposClases.size()] = contador;
            }
        }
        return observaciones;
    }

    /**
     * Weighted entropy of the whole attribute: sum over values of
     * (value frequency) * (value entropy). Gain is reported as 1 - entropy since
     * the per-class log base caps entropy at 1.
     */
    private ClaseAuxiliar getEntropiaAtributo(ArrayList<Double> entropiaVariables, ArrayList<ArrayList<String>> matrizAux, int columna, String atributo) {
        ClaseAuxiliar salida = new ClaseAuxiliar();
        double total = matrizAux.size();
        double entropia = 0;
        double observaciones;
        String stringAux = "";
        ArrayList<String> variablesAtributo = getClases(matrizAux, columna);
        for (int i = 0; i < variablesAtributo.size(); i++) {
            observaciones = 0;
            for (int j = 0; j < matrizAux.size(); j++) {
                if (matrizAux.get(j).get(columna).equals(variablesAtributo.get(i))) {
                    observaciones++;
                }
            }
            entropia += (observaciones/total)*entropiaVariables.get(i);
            stringAux += "+("+observaciones+"/"+total+")*"+entropiaVariables.get(i);
        }
        stringAux = "Entropia "+atributo+": "+stringAux.substring(1)+"= "+entropia+"\n";
        stringAux += "Ganancia: 1-"+entropia+"= "+(1-entropia)+"\n\n";
        salida.setProceso(stringAux);
        salida.setEntropia(entropia);
        return salida;
    }

    // Index of the minimum entropy (= maximum information gain) attribute.
    private int getMejor(ArrayList<Double> entropiaAtributos) {
        int index = 0;
        double valor = 999;  // sentinel larger than any possible entropy (max is 1)
        for (int i = 0; i < entropiaAtributos.size(); i++) {
            if (valor > entropiaAtributos.get(i)) {
                valor = entropiaAtributos.get(i);
                index = i;
            }
        }
        return index;
    }

    /**
     * Splits the matrix into one sub-matrix per value of the chosen attribute,
     * removing the chosen attribute's column from each matched row.
     * NOTE(review): this mutates the rows of the input matrix in place.
     */
    private ArrayList<ArrayList<ArrayList<String>>> subdividirMariz(ArrayList<ArrayList<String>> matriz, int mejorAtributo, ArrayList<String> variables) {
        ArrayList<ArrayList<ArrayList<String>>> salida = new ArrayList<>();
        for (int i = 0; i < variables.size(); i++) {
            ArrayList<ArrayList<String>> matrizAux = new ArrayList<>();
            for (int j = 0; j < matriz.size(); j++) {
                if (matriz.get(j).get(mejorAtributo).equals(variables.get(i))) {
                    matriz.get(j).remove(mejorAtributo);
                    matrizAux.add(matriz.get(j));
                }
            }
            salida.add(matrizAux);
        }
        return salida;
    }

    /**
     * Recursive ID3 step: stops on a pure node (entropy 0) or a degenerate attribute
     * (single remaining value → class percentages reported); otherwise picks the best
     * attribute, splits, and recurses into each sub-matrix.
     */
    private ClaseAuxiliar correrAlgoritmo(ArrayList<ArrayList<String>> matrizAux, int cantClases, ArrayList<String> atributos, ArrayList<String> tiposClases) {
        ClaseAuxiliar salida = new ClaseAuxiliar();
        ArrayList<ArrayList<Double>> calculos = new ArrayList<>();
        ArrayList<Double> entropiaAtributos = new ArrayList<>();
        String proceso = "";
        double entropia;
        int columnaClase = matrizAux.get(0).size()-1;
        // Overall entropy of this node's class distribution.
        entropia = getEntropia(matrizAux, tiposClases);
        proceso += "Entropia general: " + entropia + "\n";
        if (entropia == 0) {
            // Pure node: all rows share one class — emit a leaf.
            salida.setProceso(proceso);
            salida.setNodo(false);
            salida.setClase("Clase: "+ matrizAux.get(0).get(columnaClase));
            return salida;
        }
        // Compute the entropy of every remaining attribute.
        for (int i = 0; i < matrizAux.get(0).size()-1; i++) {
            // Distinct values of this attribute in the current sub-matrix.
            ArrayList<String> variablesAtributo = getClases(matrizAux, i);
            proceso += "Variables del atributo "+ atributos.get(i) +": " + variablesAtributo + "\n";
            // Degenerate attribute (one value left) with nonzero entropy: report the
            // class percentages and mark the leaf as undefined.
            if ((variablesAtributo.size() == 1)) {
                proceso += "Solo queda una variable para el atributo";
                salida.setProceso(proceso);
                salida.setNodo(false);
                ArrayList<String> clasePorcentaje = new ArrayList<>();
                ArrayList<Double> porcentaje = new ArrayList<>();
                int total = matrizAux.size();
                String claseIndefinida = "";
                for (int j = 0; j < cantClases; j++) {
                    int cont = 0;
                    for (int k = 0; k < matrizAux.size(); k++) {
                        if (matrizAux.get(k).get(columnaClase).equals(tiposClases.get(j))) {
                            cont++;
                        }
                    }
                    clasePorcentaje.add(tiposClases.get(j));
                    double porcent = ((double)cont)/((double)total);
                    porcentaje.add(porcent);
                    claseIndefinida += "Clase: "+ tiposClases.get(j)+" : "+new DecimalFormat("#.##").format(porcent)+"%\n";
                }
                salida.setClase(claseIndefinida);
                salida.setClasePorcentaje(clasePorcentaje);
                salida.setPorcentaje(porcentaje);
                salida.setIndefinida(true);
                return salida;
            }
            // Entropy of each value of the attribute…
            ClaseAuxiliar datosEntropiaVariable = getEntropiaVariables(i, matrizAux, tiposClases, atributos.get(i), variablesAtributo);
            calculos.add(datosEntropiaVariable.getValores());
            proceso += datosEntropiaVariable.getProceso();
            // …then the attribute's weighted entropy / gain.
            ClaseAuxiliar datosEntropiaAtributo = getEntropiaAtributo(datosEntropiaVariable.getValores(), matrizAux, i, atributos.get(i));
            entropiaAtributos.add(datosEntropiaAtributo.getEntropia());
            proceso += datosEntropiaAtributo.getProceso();
        }
        // Nothing left to split on.
        if (entropiaAtributos.isEmpty()) {
            proceso += "Ya no es posible subdividir\n";
            salida.setProceso(proceso);
            salida.setNodo(false);
            salida.setClase("Error");
            return salida;
        }
        int indexMejorAtributo = getMejor(entropiaAtributos);
        String nombreMejorAtriburo = atributos.get(indexMejorAtributo);
        ArrayList<String> subAtributos = atributos;
        proceso += "Atributo seleccionado como mejor: " + atributos.get(indexMejorAtributo)+"\n";
        subAtributos.remove(indexMejorAtributo);
        // New tree node for the selected attribute.
        NodoArbolDecision nodoArbol = new NodoArbolDecision();
        nodoArbol.setNombeAtributo(nombreMejorAtriburo);
        // Map the attribute back to its column index in the ORIGINAL input matrix.
        for (int i = 0; i < nombreAtributos.size(); i++) {
            if (nombreMejorAtriburo.equals(nombreAtributos.get(i))) {
                nodoArbol.setIndice(i);
                break;
            }
        }
        // Values of the selected attribute at this node.
        ArrayList<String> variablesAtri = getClases(matrizAux, indexMejorAtributo);
        nodoArbol.setVariablesAtributo(variablesAtri);
        // Build the sub-tables (one per attribute value).
        ArrayList<ArrayList<ArrayList<String>>> subMatriz = subdividirMariz(matrizAux, indexMejorAtributo, variablesAtri);
        proceso += "Submatrices: \n";
        for (int i = 0; i < subMatriz.size(); i++) {
            proceso += "Variable: "+variablesAtri.get(i)+" "+subMatriz.get(i)+"\n";
        }
        // Recurse into every sub-table.
        ArrayList<NodoArbolDecision> estructuraA = new ArrayList<>();
        ArrayList<String> clasificacion = new ArrayList<>();
        boolean tieneHijos = false;
        for (int i = 0; i < subMatriz.size(); i++) {
            proceso += "\nSub arbol de decision atributo: "+ nombreMejorAtriburo +", variable: "+ variablesAtri.get(i) +"\n";
            proceso += "Atributos: "+subAtributos+"\n";
            // Fixed: each child gets its OWN copy of the attribute list. The original
            // passed the shared subAtributos list, so a child removing an attribute
            // corrupted the list its siblings (whose sub-matrices still contain that
            // column) were about to use.
            ClaseAuxiliar salidaAux = correrAlgoritmo(subMatriz.get(i), cantClases, new ArrayList<>(subAtributos), tiposClases);
            proceso += salidaAux.getProceso()+"\n";
            if (salidaAux.isNodo()) {
                estructuraA.add(salidaAux.getEstructuraArbol());
                clasificacion.add(null);
                tieneHijos = true;
            }else{
                // Child is a leaf: store its class label instead of a subtree.
                estructuraA.add(null);
                clasificacion.add(salidaAux.getClase());
            }
        }
        nodoArbol.setHijos(estructuraA);
        nodoArbol.setClase(clasificacion);
        if (tieneHijos) {
            salida.setNodo(true);
        }
        salida.setProceso(proceso);
        salida.setEstructuraArbol(nodoArbol);
        return salida;
    }

    /**
     * Classifies one observation by walking the tree: follow the branch whose value
     * matches the observation's attribute; a null child marks a leaf whose stored
     * class label is returned.
     */
    public String clasificarEntradas(ArrayList<String> clasificar, NodoArbolDecision estructuraArbol) {
        String salida = "";
        for (int j = 0; j < estructuraArbol.getVariablesAtributo().size(); j++) {
            if (clasificar.get(estructuraArbol.getIndice()).equals(estructuraArbol.getVariablesAtributo().get(j))) {
                if (estructuraArbol.getHijos().get(j) != null) {
                    // Recurse into the matching subtree.
                    salida = clasificarEntradas(clasificar, estructuraArbol.getHijos().get(j));
                }else{
                    // Null child == leaf: return the stored class.
                    salida = estructuraArbol.getClase().get(j);
                }
            }
        }
        return salida;
    }

    /**
     * Renders the tree as text.
     */
    public String mostrarArbol(NodoArbolDecision estructuraArbol) {
        String salida = "Arbol de decision:\n";
        salida += "Atributo: "+estructuraArbol.getNombeAtributo()+"\n";
        for (int i = 0; i < estructuraArbol.getVariablesAtributo().size(); i++) {
            salida += "\tVariable: "+estructuraArbol.getVariablesAtributo().get(i)+"\n";
            if (estructuraArbol.getHijos().get(i)!=null) {
                // Fixed: the original discarded the recursive call's return value, so
                // subtrees never appeared in the rendered output.
                salida += mostrarArbol(estructuraArbol.getHijos().get(i));
            }else{
                salida += "Clase: "+estructuraArbol.getClase().get(i)+"\n";
            }
        }
        return salida;
    }
}
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.bigtable.hbase.adapters.filters;

import com.google.cloud.bigtable.data.v2.models.Filters;
import com.google.cloud.bigtable.hbase.adapters.read.DefaultReadHooks;
import com.google.cloud.bigtable.util.RowKeyWrapper;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableRangeSet;
import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.protobuf.ByteString;
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/**
 * Unit tests for {@link FilterListAdapter}: verifies that HBase FilterLists are adapted to
 * Bigtable interleave (MUST_PASS_ONE) or chain (MUST_PASS_ALL) filters, that unsupported
 * child filters are reported, and that row-key index scan hints are intersected/unioned
 * correctly.
 */
@RunWith(JUnit4.class)
public class TestFilterListAdapter {
  // Adapting a filterlist is a cooperative between the filter list adapter
  // and the filter adapter.
  FilterAdapter filterAdapter = FilterAdapter.buildAdapter();
  Scan emptyScan = new Scan();
  FilterAdapterContext emptyScanContext = null;

  @Before
  public void setup() {
    // Fresh context per test; null read hooks are sufficient for these cases.
    emptyScanContext = new FilterAdapterContext(emptyScan, null);
  }

  // Builds a two-element FilterList (value == "value", value == "value2") under the
  // given combining operator.
  FilterList makeFilterList(Operator filterOperator) {
    return new FilterList(
        filterOperator,
        new ValueFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("value"))),
        new ValueFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("value2"))));
  }

  @Test
  public void interleavedFiltersAreAdapted() throws IOException {
    // MUST_PASS_ONE (logical OR) should map onto a Bigtable interleave filter whose
    // children are the individually-adapted HBase filters, in order.
    FilterList filterList = makeFilterList(Operator.MUST_PASS_ONE);
    List<Filter> filters = filterList.getFilters();

    Filters.Filter expectedFilter = adapt(filterList);
    Assert.assertEquals(filters.size(), expectedFilter.toProto().getInterleave().getFiltersCount());

    for (int i = 0; i < filters.size(); i++) {
      Assert.assertEquals(
          adapt(filters.get(i)).toProto(), expectedFilter.toProto().getInterleave().getFilters(i));
    }
  }

  @Test
  public void chainedFiltersAreAdapted() throws IOException {
    // MUST_PASS_ALL (logical AND) should map onto a Bigtable chain filter whose
    // children are the individually-adapted HBase filters, in order.
    FilterList filterList = makeFilterList(Operator.MUST_PASS_ALL);
    List<Filter> filters = filterList.getFilters();

    Filters.Filter expectedFilter = adapt(filterList);
    Assert.assertEquals(filters.size(), expectedFilter.toProto().getChain().getFiltersCount());

    for (int i = 0; i < filters.size(); i++) {
      Assert.assertEquals(
          adapt(filters.get(i)).toProto(), expectedFilter.toProto().getChain().getFilters(i));
    }
  }

  @Test
  public void compositeFilterSupportStatusIsReturnedForUnsupportedChildFilters() {
    // A child adapter that always reports "not supported" must make the whole
    // filter list unsupported; the list depth should be 1 inside the child call.
    FilterListAdapter filterListAdapter =
        new FilterListAdapter(
            new FilterAdapter() {
              @Override
              public void collectUnsupportedStatuses(
                  FilterAdapterContext context, Filter filter, List<FilterSupportStatus> statuses) {
                Assert.assertEquals(
                    "FilterListDepth should be incremented in isFilterSupported.",
                    1,
                    context.getFilterListDepth());
                statuses.add(FilterSupportStatus.newNotSupported("Test"));
              }
            });

    FilterList filterList = makeFilterList(Operator.MUST_PASS_ALL);
    FilterSupportStatus status = filterListAdapter.isFilterSupported(emptyScanContext, filterList);
    Assert.assertFalse(
        "collectUnsupportedStatuses should have been invoked returning unsupported statuses.",
        status.isSupported());
  }

  @Test
  public void collectUnsupportedStatusesStartsANewContext() {
    // Same as above, but additionally checks that the shared context starts at
    // depth 0 before the filter list is examined.
    FilterListAdapter filterListAdapter =
        new FilterListAdapter(
            new FilterAdapter() {
              @Override
              public void collectUnsupportedStatuses(
                  FilterAdapterContext context, Filter filter, List<FilterSupportStatus> statuses) {
                Assert.assertEquals(
                    "FilterListDepth should be incremented in isFilterSupported.",
                    1,
                    context.getFilterListDepth());
                statuses.add(FilterSupportStatus.newNotSupported("Test"));
              }
            });

    Assert.assertEquals("Initial depth should be 0.", 0, emptyScanContext.getFilterListDepth());

    FilterList filterList = makeFilterList(Operator.MUST_PASS_ALL);
    FilterSupportStatus status = filterListAdapter.isFilterSupported(emptyScanContext, filterList);
    Assert.assertFalse(
        "collectUnsupportedStatuses should have been invoked returning unsupported statuses.",
        status.isSupported());
  }

  /** FilterListAdapter should handle the fact that PageFilterAdapter returns null. */
  @Test
  public void testPageFilter() throws IOException {
    // A chain of [qualifier filter, page filter] should adapt to just the qualifier
    // filter, since the page filter adapts to null (it is handled via read hooks).
    byte[] qualA = Bytes.toBytes("qualA");
    PageFilter pageFilter = new PageFilter(20);
    FilterList filterList =
        new FilterList(
            Operator.MUST_PASS_ALL,
            new QualifierFilter(CompareOp.EQUAL, new BinaryComparator(qualA)),
            pageFilter);
    FilterAdapter adapter = FilterAdapter.buildAdapter();
    Optional<Filters.Filter> adapted =
        adapter.adaptFilter(new FilterAdapterContext(new Scan(), new DefaultReadHooks()), filterList);
    Assert.assertTrue(adapted.isPresent());
    Optional<Filters.Filter> qualifierAdapted =
        adapter.adaptFilter(
            new FilterAdapterContext(new Scan(), new DefaultReadHooks()),
            filterList.getFilters().get(0));
    Assert.assertEquals(qualifierAdapted.get().toProto(), adapted.get().toProto());
  }

  @Test
  public void testChainedIndexHintIntersection() {
    // AND of prefixes "a" and "abc" narrows to the "abc" prefix range [abc, abd).
    FilterAdapter adapter = FilterAdapter.buildAdapter();
    PrefixFilter p1 = new PrefixFilter("a".getBytes());
    PrefixFilter p2 = new PrefixFilter("abc".getBytes());
    FilterList filterList = new FilterList(Operator.MUST_PASS_ALL, p1, p2);

    RangeSet<RowKeyWrapper> actual = adapter.getIndexScanHint(filterList);
    RangeSet<RowKeyWrapper> expected =
        ImmutableRangeSet.of(
            Range.closedOpen(
                new RowKeyWrapper(ByteString.copyFromUtf8("abc")),
                new RowKeyWrapper(ByteString.copyFromUtf8("abd"))));

    Assert.assertEquals(expected, actual);
  }

  @Test
  public void testChainedIndexHintDisjointIntersection() {
    // AND of disjoint prefixes "a" and "b" can match nothing: empty range set.
    FilterAdapter adapter = FilterAdapter.buildAdapter();
    PrefixFilter p1 = new PrefixFilter("a".getBytes());
    PrefixFilter p2 = new PrefixFilter("b".getBytes());
    FilterList filterList = new FilterList(Operator.MUST_PASS_ALL, p1, p2);

    RangeSet<RowKeyWrapper> actual = adapter.getIndexScanHint(filterList);
    RangeSet<RowKeyWrapper> expected = ImmutableRangeSet.of();

    Assert.assertEquals(expected, actual);
  }

  @Test
  public void testInterleavedIndexHintUnion() {
    // OR of overlapping prefixes "a" and "abc" widens to the "a" prefix range [a, b).
    FilterAdapter adapter = FilterAdapter.buildAdapter();
    PrefixFilter p1 = new PrefixFilter("a".getBytes());
    PrefixFilter p2 = new PrefixFilter("abc".getBytes());
    FilterList filterList = new FilterList(Operator.MUST_PASS_ONE, p1, p2);

    RangeSet<RowKeyWrapper> actual = adapter.getIndexScanHint(filterList);
    RangeSet<RowKeyWrapper> expected =
        ImmutableRangeSet.of(
            Range.closedOpen(
                new RowKeyWrapper(ByteString.copyFromUtf8("a")),
                new RowKeyWrapper(ByteString.copyFromUtf8("b"))));

    Assert.assertEquals(expected, actual);
  }

  @Test
  public void testInterleavedIndexHintDisjointUnion() {
    // OR of disjoint prefixes "a" and "c" yields both prefix ranges [a, b) and [c, d).
    FilterAdapter adapter = FilterAdapter.buildAdapter();
    PrefixFilter p1 = new PrefixFilter("a".getBytes());
    PrefixFilter p2 = new PrefixFilter("c".getBytes());
    FilterList filterList = new FilterList(Operator.MUST_PASS_ONE, p1, p2);

    RangeSet<RowKeyWrapper> actual = adapter.getIndexScanHint(filterList);
    RangeSet<RowKeyWrapper> expected =
        ImmutableRangeSet.<RowKeyWrapper>builder()
            .add(
                Range.closedOpen(
                    new RowKeyWrapper(ByteString.copyFromUtf8("a")),
                    new RowKeyWrapper(ByteString.copyFromUtf8("b"))))
            .add(
                Range.closedOpen(
                    new RowKeyWrapper(ByteString.copyFromUtf8("c")),
                    new RowKeyWrapper(ByteString.copyFromUtf8("d"))))
            .build();

    Assert.assertEquals(expected, actual);
  }

  // Helper: adapt a filter in the shared empty-scan context; unwraps the Optional.
  protected Filters.Filter adapt(Filter filter) throws IOException {
    return filterAdapter.adaptFilter(emptyScanContext, filter).get();
  }
}
package org.clafer.ir.analysis.deduction;

import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.clafer.collection.Pair;
import org.clafer.common.UnsatisfiableException;
import org.clafer.domain.Domain;
import org.clafer.ir.IllegalIntException;
import org.clafer.ir.IllegalSetException;
import org.clafer.ir.IllegalStringException;
import org.clafer.ir.IrAcyclic;
import org.clafer.ir.IrAdd;
import org.clafer.ir.IrAllDifferent;
import org.clafer.ir.IrAnd;
import org.clafer.ir.IrArrayEquality;
import org.clafer.ir.IrArrayToSet;
import org.clafer.ir.IrBoolChannel;
import org.clafer.ir.IrBoolExpr;
import org.clafer.ir.IrBoolVar;
import org.clafer.ir.IrCard;
import org.clafer.ir.IrCompare;
import org.clafer.ir.IrCount;
import org.clafer.ir.IrElement;
import org.clafer.ir.IrIfOnlyIf;
import org.clafer.ir.IrIntChannel;
import org.clafer.ir.IrIntVar;
import org.clafer.ir.IrJoinFunction;
import org.clafer.ir.IrJoinRelation;
import org.clafer.ir.IrMember;
import org.clafer.ir.IrMinus;
import org.clafer.ir.IrModule;
import org.clafer.ir.IrMul;
import org.clafer.ir.IrNot;
import org.clafer.ir.IrNotMember;
import org.clafer.ir.IrOffset;
import org.clafer.ir.IrSelectN;
import org.clafer.ir.IrSetDifference;
import org.clafer.ir.IrSetElement;
import org.clafer.ir.IrSetEquality;
import org.clafer.ir.IrSetMin;
import org.clafer.ir.IrSetTernary;
import org.clafer.ir.IrSetUnion;
import org.clafer.ir.IrSetVar;
import org.clafer.ir.IrSingleton;
import org.clafer.ir.IrSortSets;
import org.clafer.ir.IrSortStrings;
import org.clafer.ir.IrSortStringsChannel;
import org.clafer.ir.IrStringCompare;
import org.clafer.ir.IrStringVar;
import org.clafer.ir.IrSubsetEq;
import org.clafer.ir.IrTernary;
import org.clafer.ir.IrVar;

/**
 * Feasibility-based bounds tightening.
 *
 * In addition, removes some non-bounds as well and coalesces variables when
 * possible.
 *
 * <p>Implementation outline: three dispatch tables map IR expression classes to
 * their deducers (boolean, integer, set). {@link #propagate(IrModule)} runs the
 * deduction to a fixpoint; a contradiction is signaled by the deducers throwing
 * {@code IllegalIntException}/{@code IllegalSetException}/{@code IllegalStringException},
 * which is translated here into {@code UnsatisfiableException}.
 *
 * @author jimmy
 */
public class FBBT {

    // Dispatch tables: IR node class -> deducer that tightens its operands.
    private final Map<Class<?>, BoolDeducer<?>> boolDeducers;
    private final Map<Class<?>, IntDeducer<?>> intDeducers;
    private final Map<Class<?>, SetDeducer<?>> setDeducers;

    public FBBT() {
        boolDeducers = new HashMap<>();
        boolDeducers.put(IrAcyclic.class, new AcyclicDeducer());
        boolDeducers.put(IrAllDifferent.class, new AllDifferentDeducer());
        boolDeducers.put(IrAnd.class, new AndDeducer());
        boolDeducers.put(IrArrayEquality.class, new ArrayEqualityDeducer());
        boolDeducers.put(IrBoolChannel.class, new BoolChannelDeducer());
        boolDeducers.put(IrBoolVar.class, new BoolVarDeducer());
        boolDeducers.put(IrCompare.class, new CompareDeducer());
        boolDeducers.put(IrIfOnlyIf.class, new IfOnlyIfDeducer());
        boolDeducers.put(IrIntChannel.class, new IntChannelDeducer());
        boolDeducers.put(IrMember.class, new MemberDeducer());
        boolDeducers.put(IrNot.class, new NotDeducer());
        boolDeducers.put(IrNotMember.class, new NotMemberDeducer());
        boolDeducers.put(IrSelectN.class, new SelectNDeducer());
        boolDeducers.put(IrSetEquality.class, new SetEqualityDeducer());
        boolDeducers.put(IrStringCompare.class, new StringCompareDeducer());
        boolDeducers.put(IrSortSets.class, new SortSetsDeducer());
        boolDeducers.put(IrSortStrings.class, new SortStringsDeducer());
        boolDeducers.put(IrSortStringsChannel.class, new SortStringsChannelDeducer());
        boolDeducers.put(IrSubsetEq.class, new SubsetEqDeducer());

        intDeducers = new HashMap<>();
        intDeducers.put(IrAdd.class, new AddDeducer());
        intDeducers.put(IrCard.class, new CardDeducer());
        intDeducers.put(IrCount.class, new CountDeducer());
        intDeducers.put(IrElement.class, new ElementDeducer());
        intDeducers.put(IrMinus.class, new MinusDeducer());
        intDeducers.put(IrMul.class, new MulDeducer());
        intDeducers.put(IrSetMin.class, new SetMinDeducer());
        intDeducers.put(IrTernary.class, new TernaryDeducer());

        setDeducers = new HashMap<>();
        setDeducers.put(IrArrayToSet.class, new ArrayToSetDeducer());
        setDeducers.put(IrJoinFunction.class, new JoinFunctionDeducer());
        setDeducers.put(IrJoinRelation.class, new JoinRelationDeducer());
        setDeducers.put(IrOffset.class, new OffsetDeducer());
        setDeducers.put(IrSetDifference.class, new SetDifferenceDeducer());
        setDeducers.put(IrSetElement.class, new SetElementDeducer());
        setDeducers.put(IrSetTernary.class, new SetTernaryDeducer());
        setDeducers.put(IrSetUnion.class, new SetUnionDeducer());
        setDeducers.put(IrSingleton.class, new SingletonDeducer());
    }

    /**
     * Propagate the module's constraints to a fixpoint, coalescing variables
     * along the way.
     *
     * @param module the module to tighten
     * @return the accumulated coalesce map paired with the rewritten module
     * @throws UnsatisfiableException if propagation derives a contradiction
     */
    public Pair<Coalesce, IrModule> propagate(IrModule module) {
        try {
            // Seed the worklist with every constraint; iterate until no
            // coalescing happens in a full pass.
            Set<IrBoolExpr> changed = new HashSet<>();
            changed.addAll(module.getConstraints());
            State state = new State(module);
            Coalesce coalesce = propagateImpl(state, changed);
            Coalesce cur = coalesce;
            while (!cur.isEmpty()) {
                cur = propagateImpl(state, changed);
                // Compose so later renamings are applied on top of earlier ones.
                coalesce = coalesce.compose(cur);
            }
            return new Pair<>(coalesce, state.toModule());
        } catch (IllegalIntException | IllegalSetException | IllegalStringException e) {
            // A deducer emptied a domain: the module is unsatisfiable.
            throw new UnsatisfiableException(e);
        }
    }

    /**
     * Apply an already-built deduction to {@code state} and then re-propagate
     * to a fixpoint. Used by {@link #constructiveDisjunction}.
     */
    private Coalesce propagate(Deduction deduction, State state) {
        try {
            Coalesce coalesce = deduction.apply(state.setVars, state.stringVars);
            if (coalesce.isEmpty()) {
                return coalesce;
            }
            Set<IrBoolExpr> changed = new HashSet<>();
            state.apply(coalesce, changed);
            Coalesce cur = coalesce;
            while (!cur.isEmpty()) {
                cur = propagateImpl(state, changed);
                coalesce = coalesce.compose(cur);
            }
            return coalesce;
        } catch (IllegalIntException | IllegalSetException | IllegalStringException e) {
            throw new UnsatisfiableException(e);
        }
    }

    /**
     * One propagation pass: run every changed constraint through the deducers,
     * apply the resulting coalesce to the state, and collect the constraints
     * that were rewritten into {@code changed} for the next pass.
     */
    private Coalesce propagateImpl(State state, Set<IrBoolExpr> changed) {
        Deduction deduction = new Deduction(boolDeducers, intDeducers, setDeducers);
        changed.forEach(deduction::tautology);
        assert deduction.checkInvariants();
        Coalesce coalesce = deduction.apply(state.setVars, state.stringVars);
        if (coalesce.isEmpty()) {
            return coalesce;
        }
        changed.clear();
        state.apply(coalesce, changed);
        return coalesce;
    }

    /**
     * Constructive disjunction over {@code case1 || case2}: propagate each case
     * separately and keep only deductions valid in BOTH branches (domain
     * unions / env unions / ker intersections). A branch that is itself
     * unsatisfiable lets us assert the other branch's negation-free
     * contradiction instead.
     *
     * @param case1  first disjunct
     * @param case2  second disjunct
     * @param module the module providing the shared constraints
     * @return the coalesce map paired with the rewritten module
     */
    public Pair<Coalesce, IrModule> constructiveDisjunction(IrBoolExpr case1, IrBoolExpr case2, IrModule module) {
        Deduction deduction = new Deduction(boolDeducers, intDeducers, setDeducers);
        module.getConstraints().forEach(deduction::tautology);
        assert deduction.checkInvariants();
        try {
            Deduction case1Deduction = new Deduction(deduction);
            case1Deduction.tautology(case1);
            Coalesce coalesce1 = propagate(case1Deduction, new State(module, case1));
            try {
                Deduction case2Deduction = new Deduction(deduction);
                case2Deduction.tautology(case2);
                Coalesce coalesce2 = propagate(case2Deduction, new State(module, case2));
                // Keep only facts deducible in both branches: int domains are
                // unioned; set vars union env/card and intersect ker.
                coalesce1.forEachIntVar((key, value1) -> {
                    IrIntVar value2 = coalesce2.get(key);
                    if (value2 != key) {
                        Domain combine = value1.getDomain().union(value2.getDomain());
                        deduction.within(key, combine);
                    }
                });
                coalesce1.forEachSetVar((key, value1) -> {
                    IrSetVar value2 = coalesce2.get(key);
                    if (value2 != key) {
                        Domain combineKer = value1.getKer().intersection(value2.getKer());
                        Domain combineEnv = value1.getEnv().union(value2.getEnv());
                        Domain combineCard = value1.getCard().union(value2.getCard());
                        deduction.kerContains(key, combineKer);
                        deduction.envSubsetOf(key, combineEnv);
                        deduction.cardWithin(key, combineCard);
                    }
                });
            } catch (UnsatisfiableException e) {
                // case2 is infeasible, so case1 must hold: record !case2.
                deduction.contradiction(case2);
            }
        } catch (UnsatisfiableException e) {
            // case1 is infeasible, so case2 must hold: record !case1.
            deduction.contradiction(case1);
        }
        State state = new State(module);
        Coalesce coalesce = propagate(deduction, state);
        return new Pair<>(coalesce, state.toModule());
    }

    /**
     * Mutable propagation state: the live constraint array (compacted in place
     * as constraints become tautologies) plus the non-constant set/string
     * variables, rewritten through each coalesce.
     */
    private static class State {

        // Live constraints occupy indices [0, size); slots beyond are nulled.
        final IrBoolExpr[] constraints;
        int size;
        Set<IrSetVar> setVars = new HashSet<>();
        Set<IrStringVar> stringVars = new HashSet<>();
        // Scratch sets reused across apply() calls to avoid reallocation.
        Set<IrSetVar> reuseSetVars = new HashSet<>();
        Set<IrStringVar> reuseStringVars = new HashSet<>();

        /**
         * @param module    source of constraints and variables
         * @param tautology optional extra constraint appended to the array
         *                  (used for branch assumptions); may be null
         */
        State(IrModule module, IrBoolExpr tautology) {
            Collection<IrBoolExpr> c = module.getConstraints();
            if (tautology == null) {
                this.constraints = c.toArray(new IrBoolExpr[c.size()]);
            } else {
                // Reserve one extra slot and place the assumption last.
                this.constraints = c.toArray(new IrBoolExpr[c.size() + 1]);
                this.constraints[c.size()] = tautology;
            }
            this.size = constraints.length;
            Set<IrVar> vars = module.getVariables();
            for (IrVar var : vars) {
                if (!var.isConstant()) {
                    if (var instanceof IrSetVar) {
                        setVars.add((IrSetVar) var);
                    } else if (var instanceof IrStringVar) {
                        stringVars.add((IrStringVar) var);
                    }
                }
            }
        }

        State(IrModule module) {
            this(module, null);
        }

        /**
         * Rewrite all constraints and tracked variables through
         * {@code coalesce}. Constraints that become true are removed
         * (swap-with-last compaction, hence the manual index control);
         * constraints that become false abort with UnsatisfiableException;
         * rewritten-but-live constraints are reported via {@code changed}.
         */
        void apply(Coalesce coalesce, Set<IrBoolExpr> changed) {
            for (int i = 0; i < size;) {
                IrBoolExpr newConstraint = coalesce.rewrite(constraints[i], null);
                if (newConstraint.getDomain().isFalse()) {
                    throw new UnsatisfiableException();
                } else if (newConstraint.getDomain().isTrue()) {
                    // Tautology: compact by moving the last live constraint here.
                    size--;
                    constraints[i] = constraints[size];
                    constraints[size] = null;
                } else {
                    if (constraints[i] != newConstraint) {
                        changed.add(newConstraint);
                        constraints[i] = newConstraint;
                    }
                    i++;
                }
            }
            // Rewrite the variable sets into the scratch buffers, then swap
            // live/scratch roles and clear the new scratch for next time.
            setVars.forEach(x -> reuseSetVars.add((IrSetVar) coalesce.rewrite(x, null)));
            Set<IrSetVar> tempSetVars = setVars;
            setVars = reuseSetVars;
            reuseSetVars = tempSetVars;
            reuseSetVars.clear();
            stringVars.forEach(x -> reuseStringVars.add((IrStringVar) coalesce.rewrite(x, null)));
            Set<IrStringVar> tempStringVars = stringVars;
            stringVars = reuseStringVars;
            reuseStringVars = tempStringVars;
            reuseStringVars.clear();
        }

        /** Build a fresh module containing only the still-live constraints. */
        IrModule toModule() {
            IrModule module = new IrModule();
            for (int i = 0; i < size; i++) {
                module.addConstraints(constraints[i]);
            }
            return module;
        }
    }
}
package org.biopax.validator;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.biopax.paxtools.io.SimpleIOHandler;
import org.biopax.paxtools.model.Model;
import org.biopax.paxtools.normalizer.Normalizer;
import org.biopax.validator.api.Validator;
import org.biopax.validator.api.ValidatorUtils;
import org.biopax.validator.api.beans.Validation;
import org.biopax.validator.api.beans.ValidatorResponse;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.core.io.Resource;
import org.springframework.util.ResourceUtils;

import javax.xml.transform.Source;
import javax.xml.transform.stream.StreamSource;
import java.io.*;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;

/**
 * PC BioPAX Validator (console), which
 * checks from the user input or a "batch" file.
 * <p>
 * See: README.txt, context.xml, messages.properties
 *
 * @author rodche
 */
public class Main {
	static final Log log = LogFactory.getLog(Main.class);
	static ApplicationContext ctx;

	// Options populated from the command line (see printHelpAndQuit for usage).
	static boolean autofix = false;          // --auto-fix: normalize and save a modified copy
	static int maxErrors = 0;                // --max-errors=<n>: 0 means unlimited
	static final String EXT = ".modified.owl"; // suffix for auto-fixed output files
	static String profile = null;            // --profile=<name>, e.g. "notstrict"
	static String xmlBase = null;            // --xmlBase=<base> for the Normalizer
	static String outFormat = "html";        // --out-format=xml|html
	static String output = null;             // --output=<filename>: single consolidated report

	/**
	 * Entry point: parses options, boots the Spring context (which wires the
	 * validation rules via load-time weaving), and validates every resource.
	 *
	 * @param args args[0] is the input (file, url, directory, or "list:" batch
	 *             file); the rest are optional "--" flags in any order
	 */
	public static void main(String[] args) throws Exception {
		if (args == null || args.length == 0) {
			log.warn("At least input file/dir must be specified.");
			printHelpAndQuit();
		}

		String input = args[0];
		if (input == null || input.isEmpty() || input.startsWith("--")) {
			log.warn("Input (file, url, or directory) is probably missing");
			printHelpAndQuit();
		}

		// match optional parameters
		for (int i = 1; i < args.length; i++) {
			if ("--auto-fix".equalsIgnoreCase(args[i])) {
				autofix = true;
			} else if (args[i].startsWith("--max-errors=")) {
				String n = args[i].substring(13);
				maxErrors = Integer.parseInt(n);
			} else if (args[i].startsWith("--profile=")) {
				profile = args[i].substring(10);
			} else if (args[i].startsWith("--xmlBase=")) {
				xmlBase = args[i].substring(10);
			} else if (args[i].startsWith("--output=")) {
				output = args[i].substring(9);
			} else if (args[i].startsWith("--out-format=")) {
				outFormat = args[i].substring(13);
				if (outFormat.isEmpty())
					outFormat = "html";
			}
		}

		// this does 90% of the job ;)
		ctx = new ClassPathXmlApplicationContext(
				"META-INF/spring/appContext-loadTimeWeaving.xml",
				"META-INF/spring/appContext-validator.xml");
		// Rules are now loaded, and AOP is listening for BioPAX model method calls.

		// get the beans to work with
		Validator validator = (Validator) ctx.getBean("biopaxValidator");

		// go validate all
		runBatch(validator, getResourcesToValidate(input));
	}

	/** Prints usage to stdout and exits with a non-zero status. */
	private static void printHelpAndQuit() {
		final String usage =
				"\nThe BioPAX Validator v3\n\n"
						+ "Usage (arguments):\n <input> [--output=<filename>] [--out-format=xml|html] [--auto-fix] "
						+ "[--xmlBase=<base>] [--max-errors=<n>] [--profile=notstrict]\n\n"
						+ "Given --output=<filename>, a one-file validation report will be \n"
						+ "generated (HTML or XML) instead of default report file(s) in the \n"
						+ "current directory. Optional arguments can go in any order.\n"
						+ "For example:\n"
						+ " path/dir --out-format=xml\n"
						+ " list:batch_file.txt --output=reports.html\n"
						+ " file:biopax.owl --out-format=xml --auto-fix\n"
						+ " http://www.some.net/data.owl\n\n"
						+ "A batch file should list one task (resource) per line, i.e.,\n"
						+ "file:path/file or URL (to BioPAX data)\n"
						+ "If '--auto-fix' option was used, it also creates a new BioPAX file \n"
						+ "for each input file in the current working directory \n"
						+ "(adding '.modified.owl' exention). If the outFormat file extension \n"
						+ "is '.html', the XML result will be auto-transformed to a stand-alone \n"
						+ "HTML/javascript page, which is very similar to what the online version returns.";
		System.out.println(usage);
		System.exit(-1);
	}

	/**
	 * Validates each resource in turn, optionally auto-fixing/normalizing the
	 * model, and writes either per-input report files or one consolidated
	 * report (when --output was given).
	 *
	 * @param validator the wired BioPAX validator bean
	 * @param resources inputs to validate
	 * @throws IOException on report-writing failure
	 */
	protected static void runBatch(Validator validator, Collection<Resource> resources) throws IOException {
		// collect all reports in this object (only if --output option was used)
		final ValidatorResponse consolidatedReport = new ValidatorResponse();

		// Read from the batch and validate from file, id or url, line-by-line (stops on first empty line)
		for (Resource resource : resources) {
			Validation result = new Validation(new BiopaxIdentifier(),
					resource.getDescription(), autofix, null, maxErrors, profile);
			result.setDescription(resource.getDescription());
			log.info("BioPAX DATA IMPORT FROM: " + result.getDescription());
			try {
				validator.importModel(result, resource.getInputStream());
				validator.validate(result);
				// if autofix is enabled, then do normalize too
				if (autofix) {
					Model model = (Model) result.getModel();
					Normalizer normalizer = new Normalizer();
					normalizer.setXmlBase(xmlBase); // if xmlBase is null, the model's one is used
					normalizer.normalize(model);
				}
				if (output != null)
					consolidatedReport.addValidationResult(result);
			} catch (Exception e) {
				// best-effort batch: log and continue with the next resource
				log.error("failed", e);
			}

			final String filename = outFileName(result);

			// save modified (normalized) biopax if the option was used
			if (autofix) {
				Model model = (Model) result.getModel();
				// FIX: close the output stream deterministically (was leaked on error)
				try (OutputStream os = new FileOutputStream(filename + EXT)) {
					(new SimpleIOHandler()).convertToOWL(model, os);
				}
			}

			// remove the BioPAX data before writing report
			result.setModel(null);
			result.setModelData(null);

			// save the individual validation results
			// unless the user specified the output file explicitly
			if (output == null || output.isEmpty()) {
				Source xsltSrc = (outFormat.equalsIgnoreCase("html"))
						? new StreamSource(ctx.getResource("classpath:html-result.xsl").getInputStream())
						: null;
				// FIX: try-with-resources so the writer is closed even if write() throws
				try (PrintWriter writer = new PrintWriter(filename + ".validation." + outFormat)) {
					ValidatorUtils.write(result, writer, xsltSrc);
				}
			}

			validator.getResults().remove(result);
			log.info("Done with " + filename);
		}

		// save if the user specified the output file explicitly
		if (output != null) {
			Source xsltSrc = (outFormat.equalsIgnoreCase("html"))
					? new StreamSource(ctx.getResource("classpath:html-result.xsl").getInputStream())
					: null;
			try (Writer writer = new PrintWriter(output)) {
				ValidatorUtils.write(consolidatedReport, writer, xsltSrc);
			}
		}
	}

	/**
	 * Derives a short report file name from the validation's description
	 * (typically the source URL): strips '[' and ']', a trailing '/', and
	 * everything up to the last remaining '/'.
	 *
	 * FIX: the trailing-slash removal used replaceFirst("/&", ""), which
	 * matches the literal two-character sequence "/&" and never removed an
	 * ending '/' as the original comment intended; "/$" (regex end anchor)
	 * does.
	 */
	private static String outFileName(Validation result) {
		String filename = result.getDescription();
		// if was URL, create a shorter name;
		// remove ']', '[', and ending '/', if any
		filename = filename.replaceAll("\\[|\\]", "").replaceFirst("/$", "");
		int idx = filename.lastIndexOf('/');
		if (idx >= 0) {
			if (idx < filename.length() - 1)
				filename = filename.substring(idx + 1);
		}
		return filename;
	}

	/**
	 * Resolves the input argument into concrete Spring resources: all *.owl
	 * files of a directory, the entries of a "list:" batch file (one pseudo-URL
	 * per line, stopping at the first blank line), or the single file/URL.
	 *
	 * @param input file path, directory, URL, or "list:<batch-file>"
	 * @return the resources to validate
	 * @throws IOException on batch-file read failure
	 */
	public static Collection<Resource> getResourcesToValidate(String input) throws IOException {
		Set<Resource> setRes = new HashSet<Resource>();

		File fileOrDir = new File(input);
		if (fileOrDir.isDirectory()) {
			// validate all the OWL files in the folder
			FilenameFilter filter = new FilenameFilter() {
				public boolean accept(File dir, String name) {
					return (name.endsWith(".owl"));
				}
			};
			for (String s : fileOrDir.list(filter)) {
				String uri = "file:" + fileOrDir.getCanonicalPath() + File.separator + s;
				setRes.add(ctx.getResource(uri));
			}
		} else if (input.startsWith("list:")) {
			// consider it's a file that contains a list of (pseudo-)URLs
			String batchFile = input.replaceFirst("list:", "file:");
			// FIX: try-with-resources; the reader was leaked if a read failed
			try (BufferedReader reader = new BufferedReader(
					new InputStreamReader(ctx.getResource(batchFile).getInputStream()))) {
				String line;
				while ((line = reader.readLine()) != null && !"".equals(line.trim())) {
					// check the source URL
					if (!ResourceUtils.isUrl(line)) {
						log.error("Invalid URL: " + line + ". A resource must be either a "
								+ "pseudo URL (classpath: or file:) or standard URL!");
						continue;
					}
					setRes.add(ctx.getResource(line));
				}
			}
		} else {
			// a single local OWL file or remote data
			Resource resource = null;
			if (!ResourceUtils.isUrl(input))
				input = "file:" + input;
			resource = ctx.getResource(input);
			setRes.add(resource);
		}

		return setRes;
	}
}
/** * Copyright (c) 2014 SQUARESPACE, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.squarespace.template; import static com.squarespace.template.Patterns.EOF_CHAR; import static com.squarespace.template.Patterns.META_LEFT_CHAR; import static com.squarespace.template.Patterns.META_RIGHT_CHAR; import static com.squarespace.template.Patterns.NEWLINE_CHAR; import static com.squarespace.template.Patterns.POUND_CHAR; import static com.squarespace.template.SyntaxErrorType.BINDVAR_EXPECTS_NAME; import static com.squarespace.template.SyntaxErrorType.CTXVAR_EXPECTS_BINDINGS; import static com.squarespace.template.SyntaxErrorType.CTXVAR_EXPECTS_NAME; import static com.squarespace.template.SyntaxErrorType.EXTRA_CHARS; import static com.squarespace.template.SyntaxErrorType.FORMATTER_ARGS_INVALID; import static com.squarespace.template.SyntaxErrorType.FORMATTER_INVALID; import static com.squarespace.template.SyntaxErrorType.FORMATTER_NEEDS_ARGS; import static com.squarespace.template.SyntaxErrorType.FORMATTER_UNKNOWN; import static com.squarespace.template.SyntaxErrorType.IF_EMPTY; import static com.squarespace.template.SyntaxErrorType.IF_EXPECTED_VAROP; import static com.squarespace.template.SyntaxErrorType.IF_TOO_MANY_OPERATORS; import static com.squarespace.template.SyntaxErrorType.IF_TOO_MANY_VARS; import static com.squarespace.template.SyntaxErrorType.INJECT_EXPECTS_NAME; import static com.squarespace.template.SyntaxErrorType.INJECT_EXPECTS_PATH; import static 
com.squarespace.template.SyntaxErrorType.INVALID_INSTRUCTION; import static com.squarespace.template.SyntaxErrorType.MACRO_EXPECTS_NAME; import static com.squarespace.template.SyntaxErrorType.MISSING_SECTION_KEYWORD; import static com.squarespace.template.SyntaxErrorType.MISSING_VARIABLE_NAME; import static com.squarespace.template.SyntaxErrorType.MISSING_WITH_KEYWORD; import static com.squarespace.template.SyntaxErrorType.OR_EXPECTED_PREDICATE; import static com.squarespace.template.SyntaxErrorType.PREDICATE_ARGS_INVALID; import static com.squarespace.template.SyntaxErrorType.PREDICATE_NEEDS_ARGS; import static com.squarespace.template.SyntaxErrorType.PREDICATE_UNKNOWN; import static com.squarespace.template.SyntaxErrorType.VARIABLE_EXPECTED; import static com.squarespace.template.SyntaxErrorType.WHITESPACE_EXPECTED; import java.util.ArrayList; import java.util.Collections; import java.util.List; import com.squarespace.template.Instructions.BindVarInst; import com.squarespace.template.Instructions.CtxVarInst; import com.squarespace.template.Instructions.EvalInst; import com.squarespace.template.Instructions.IncludeInst; import com.squarespace.template.Instructions.InjectInst; import com.squarespace.template.Instructions.MacroInst; import com.squarespace.template.Instructions.PredicateInst; import com.squarespace.template.Instructions.VariableInst; /** * Tokenizes characters into instructions, expressing all rules for well-formed * combinations of instructions. * * The Tokenizer is also responsible for the internal composition of an instruction, * where the CodeMachine is responsible for the overall composition of instructions * into an executable form. * * If a potential instruction sequence cannot be parsed into an instruction, the * degree to which it matches an instruction pattern is an important factor in * whether to throw an error or emit the invalid instruction sequence as plain text. 
 */
public class Tokenizer {

  // Increased at the request of template developers.
  private final static int IF_VARIABLE_LIMIT = 30;

  private final String raw;                    // full template source
  private final int length;                    // raw.length(), cached
  private final CodeSink sink;                 // receives emitted instructions
  private final PredicateTable predicateTable; // predicate keyword lookup
  // NOTE(review): formatterTable is not referenced in this portion of the
  // class; presumably used by parseFormatters/parseVariables further down.
  private final FormatterTable formatterTable;
  private final TokenMatcher matcher;          // regex-based token scanner over raw
  private final CodeMaker maker;               // instruction factory

  private State state;                         // current state of the scanning state machine
  private List<ErrorInfo> errors;              // collected errors (validate mode only)

  boolean validate = false;    // collect errors instead of throwing on first one
  boolean preprocess = false;  // only emit '^'-scoped (preprocessor) instructions

  // Line/offset bookkeeping for error reporting; text vs instruction positions
  // are tracked separately (see emitText).
  private int textLine;
  private int textOffset;
  private int instLine;
  private int instOffset;
  // NOTE(review): index/save/metaLeft/lineCounter/lineIndex appear to be
  // scanner-position state used by the state-machine methods outside this
  // excerpt — confirm against the rest of the class.
  private int index = 0;
  private int save = 0;
  private int metaLeft = -1;
  private int lineCounter = 0;
  private int lineIndex = 0;

  public Tokenizer(
      String raw,
      CodeSink sink,
      FormatterTable formatterTable,
      PredicateTable predicateTable) {
    this(raw, sink, false, formatterTable, predicateTable);
  }

  public Tokenizer(
      String raw,
      CodeSink sink,
      boolean preprocess,
      FormatterTable formatterTable,
      PredicateTable predicateTable) {
    this.raw = raw;
    this.length = raw.length();
    this.sink = sink;
    this.preprocess = preprocess;
    this.formatterTable = formatterTable;
    this.predicateTable = predicateTable;
    this.matcher = new TokenMatcher(raw);
    this.maker = new CodeMaker();
    this.state = stateInitial;
  }

  /**
   * Runs the state machine until EOF, completes the sink, and reports success.
   * In validate mode, success means no errors were collected.
   */
  public boolean consume() throws CodeSyntaxException {
    do {
      state = state.transition();
    } while (state != stateEOF);
    sink.complete();
    return (validate) ? errors.size() == 0 : true;
  }

  /**
   * Puts the Tokenizer in a mode where it collects a list of errors, rather than
   * throwing an exception on the first parse error.
   */
  public void setValidate() {
    this.validate = true;
    if (this.errors == null) {
      this.errors = new ArrayList<>(4);
    }
  }

  public void setPreprocess() {
    this.preprocess = true;
  }

  /** Returns the collected errors; never null (lazily creates an empty list). */
  public List<ErrorInfo> getErrors() {
    if (errors == null) {
      errors = new ArrayList<>(0);
    }
    return errors;
  }

  /** Bounds-safe character read: EOF_CHAR past the end of the input. */
  private char getc(int index) {
    return (index < length) ? raw.charAt(index) : Patterns.EOF_CHAR;
  }

  /**
   * Stamps the instruction with the current 1-based line/offset, optionally
   * marks it preprocessor-scoped, and hands it to the sink.
   */
  private void emitInstruction(Instruction inst, boolean preprocessorScope) throws CodeSyntaxException {
    inst.setLineNumber(instLine + 1);
    inst.setCharOffset(instOffset + 1);
    if (preprocessorScope) {
      inst.setPreprocessScope();
    }
    sink.accept(inst);
  }

  private void emitInstruction(Instruction inst) throws CodeSyntaxException {
    emitInstruction(inst, preprocess);
  }

  /**
   * Emits the current match region (including its delimiters) as plain text.
   * Used when a sequence almost-but-not-quite parsed as an instruction.
   * Always returns true so callers can {@code return emitInvalid();}.
   */
  private boolean emitInvalid() throws CodeSyntaxException {
    sink.accept(maker.text(new StringView(raw, matcher.start() - 1, matcher.end() + 1)));
    return true;
  }

  /**
   * Text line numbering is tracked separately from other instructions.
   */
  private void emitText(int start, int end) throws CodeSyntaxException {
    Instruction inst = maker.text(raw, start, end);
    inst.setLineNumber(textLine + 1);
    inst.setCharOffset(textOffset + 1);
    sink.accept(inst);
  }

  private ErrorInfo error(SyntaxErrorType code) {
    return error(code, 0, false);
  }

  private ErrorInfo error(SyntaxErrorType code, boolean textLoc) {
    return error(code, 0, textLoc);
  }

  /**
   * Include an offset to nudge the error message character offset right to the position of the
   * error.
   */
  private ErrorInfo error(SyntaxErrorType code, int offset, boolean textLoc) {
    ErrorInfo info = new ErrorInfo(code);
    info.code(code);
    if (textLoc) {
      info.line(textLine + 1);
      info.offset(textOffset + 1);
    } else {
      info.line(instLine + 1);
      info.offset(instOffset + 1 + offset);
    }
    return info;
  }

  /** Throws in strict mode, collects the error in validate mode. */
  private void fail(ErrorInfo info) throws CodeSyntaxException {
    if (!validate) {
      throw new CodeSyntaxException(info);
    }
    errors.add(info);
  }

  /**
   * Attempt to parse the meta-delimited chars into an Instruction. The start parameter must
   * point to the location of a '{' character in the raw string. The end parameter must
   * point 1 character past a '}' character in the raw string, and start must be < end.
   *
   * Depending on how the parse fails, this may throw a syntax exception. It all depends
   * on the degree of ambiguity.
   *
   * For example if the starting sequence for an instruction is found and the rest is
   * invalid, we may throw syntax error. Instead, if the parse is clearly not an instruction
   * candidate we ignore the entire sequence and emit a text instruction. An example of
   * the latter is if the first character inside the META_LEFT is whitespace. We require
   * that instructions begin immediately following the META_LEFT character.
   *
   * Package-private to facilitate unit testing.
   * NOTE(review): the method is declared private, contradicting the line above
   * — confirm which is intended.
   */
  private boolean matchMeta(int start, int end) throws CodeSyntaxException {
    if (!(start < end)) {
      throw new RuntimeException("Start position should always be less than end. Bug in tokenizer.");
    }
    // Start token matching everything between '{' and '}'
    matcher.region(start + 1, end - 1);

    // See if the current instruction is scoped to the pre-processor.
    if (matcher.peek(0, '^')) {
      // If not in pre-processing mode, skip it.
      if (!preprocess) {
        return true;
      }
      matcher.seek(1);
    } else if (preprocess) {
      // Normal instructions in pre-processing mode are output as text.
      return false;
    }

    // Emit a comment, skipping over the "#".
    if (matcher.peek(0, '#')) {
      matcher.seek(1);
      Instruction comment = maker.comment(raw, matcher.pointer(), matcher.end());
      emitInstruction(comment);
      return true;
    }
    return parseKeyword() || parseVariable();
  }

  /**
   * Attempt to parse the range into a keyword instruction.
   */
  private boolean parseKeyword() throws CodeSyntaxException {
    if (!matcher.keyword()) {
      return false;
    }
    StringView keyword = matcher.consume();
    // Trailing '?' marks a predicate rather than an instruction keyword.
    if (keyword.lastChar() == '?') {
      Predicate predicate = resolvePredicate(keyword.subview(1, keyword.length()));
      Arguments args = parsePredicateArguments(predicate);
      if (args == null) {
        return emitInvalid();
      }
      emitInstruction(maker.predicate(predicate, args));
      return true;
    }
    InstructionType type = InstructionTable.get(keyword);
    if (type == null) {
      fail(error(INVALID_INSTRUCTION).data(keyword));
      return emitInvalid();
    }
    return parseInstruction(type, matcher.pointer(), matcher.end());
  }

  /**
   * We've found the start of an instruction. Parse the rest of the range.
   */
  private boolean parseInstruction(InstructionType type, int start, int end) throws CodeSyntaxException {

    switch (type) {

      case ALTERNATES_WITH:
        // Look for SPACE "WITH" EOF
        if (!matcher.space()) {
          fail(error(SyntaxErrorType.WHITESPACE_EXPECTED).data(matcher.remainder()));
          return emitInvalid();
        }
        matcher.consume();
        if (!matcher.wordWith()) {
          fail(error(MISSING_WITH_KEYWORD).data(matcher.remainder()));
          return emitInvalid();
        }
        matcher.consume();
        if (!matcher.finished()) {
          fail(error(EXTRA_CHARS).type(type).data(matcher.remainder()));
          return emitInvalid();
        }
        emitInstruction(maker.alternates());
        return true;

      case BINDVAR:
      {
        if (!skipWhitespace()) {
          return emitInvalid();
        }
        // Parse the variable name.
        if (!matcher.localVariable()) {
          fail(error(BINDVAR_EXPECTS_NAME).data(matcher.remainder()));
          return emitInvalid();
        }
        String name = matcher.consume().repr();
        if (!skipWhitespace()) {
          return emitInvalid();
        }
        Variables vars = parseVariables();
        if (vars == null) {
          fail(error(MISSING_VARIABLE_NAME).data(matcher.remainder()));
          return emitInvalid();
        }
        BindVarInst instruction = maker.bindvar(name, vars);
        List<FormatterCall> formatters = parseFormatters(instruction, start);
        // NOTE(review): when parseFormatters returns an empty (non-null) list
        // the instruction is never emitted — confirm this is intentional.
        if (formatters == null) {
          emitInstruction(instruction);
        } else if (!formatters.isEmpty()) {
          instruction.setFormatters(formatters);
          emitInstruction(instruction);
        }
        return true;
      }

      case CTXVAR:
      {
        if (!skipWhitespace()) {
          return emitInvalid();
        }
        // Parse the variable name
        if (!matcher.localVariable()) {
          fail(error(CTXVAR_EXPECTS_NAME).data(matcher.remainder()));
          return emitInvalid();
        }
        String name = matcher.consume().repr();
        if (!skipWhitespace()) {
          return emitInvalid();
        }
        List<Binding> bindings = parseBindings();
        if (bindings == null) {
          fail(error(CTXVAR_EXPECTS_BINDINGS).data(matcher.remainder()));
          return emitInvalid();
        }
        CtxVarInst instruction = maker.ctxvar(name, bindings);
        emitInstruction(instruction);
        return true;
      }

      case EVAL:
      {
        if (!skipWhitespace()) {
          return emitInvalid();
        }
        // Instruction has a single argument, a free-form expression
        StringView code = matcher.remainder();
        // The expression will be parsed and assembled the first time
        // the instruction is executed.
        EvalInst instruction = maker.eval(code.toString());
        emitInstruction(instruction);
        return true;
      }

      case END:
      case META_LEFT:
      case META_RIGHT:
      case NEWLINE:
      case SPACE:
      case TAB:
        // Nothing should follow these instructions.
        if (!matcher.finished()) {
          fail(error(EXTRA_CHARS).type(type).data(matcher.remainder()));
          return emitInvalid();
        }
        emitInstruction(maker.simple(type));
        return true;

      case IF:
        return parseIfExpression();

      case INCLUDE:
      {
        // this instruction accepts space-delimited arguments
        if (!matcher.space()) {
          return emitInvalid();
        }
        // TODO: the line below will never fail since it looks for at least 1 character,
        // and the matcher is currently pointing at the space above, since it's the
        // argument delimiter.
        if (!matcher.arguments()) {
          return emitInvalid();
        }
        StringView rawArgs = matcher.consume();
        Arguments args = new Arguments(rawArgs);
        if (args.count() < 1) {
          return emitInvalid();
        }
        IncludeInst instruction = maker.include(args);
        emitInstruction(instruction);
        return true;
      }

      case INJECT:
      {
        if (!skipWhitespace()) {
          return emitInvalid();
        }
        if (!matcher.localVariable()) {
          fail(error(INJECT_EXPECTS_NAME).data(matcher.remainder()));
          return emitInvalid();
        }
        String variable = matcher.consume().repr();
        if (!skipWhitespace()) {
          return emitInvalid();
        }
        if (!matcher.path()) {
          fail(error(INJECT_EXPECTS_PATH).data(matcher.remainder()));
          return emitInvalid();
        }
        String path = matcher.consume().repr();
        // Arguments are optional for INJECT.
        StringView rawArgs = null;
        Arguments args = Constants.EMPTY_ARGUMENTS;
        if (matcher.arguments()) {
          rawArgs = matcher.consume();
          args = new Arguments(rawArgs);
        }
        InjectInst instruction = maker.inject(variable, path, args);
        emitInstruction(instruction);
        return true;
      }

      case MACRO:
      {
        if (!skipWhitespace()) {
          return emitInvalid();
        }
        if (!matcher.path()) {
          fail(error(MACRO_EXPECTS_NAME));
          return emitInvalid();
        }
        StringView path = matcher.consume();
        if (!matcher.finished()) {
          fail(error(EXTRA_CHARS).type(type).data(matcher.remainder()));
          return emitInvalid();
        }
        MacroInst inst = maker.macro(path.repr());
        emitInstruction(inst);
        return true;
      }

      case OR_PREDICATE:
        // ".or" may stand alone or carry a predicate with arguments.
        if (matcher.space()) {
          matcher.consume();

          if (!matcher.predicate()) {
            fail(error(OR_EXPECTED_PREDICATE).type(type).data(matcher.remainder()));
            return emitInvalid();
          }
          Predicate predicate = resolvePredicate(matcher.consume());
          Arguments args = parsePredicateArguments(predicate);
          if (args == null) {
            // Error was emitted by parsePredicateArguments()
            return emitInvalid();
          }
          PredicateInst inst = maker.predicate(predicate, args);
          inst.setOr();
          emitInstruction(inst);
          return true;
        }
        if (!matcher.finished()) {
          fail(error(EXTRA_CHARS).type(type).data(matcher.remainder()));
          return emitInvalid();
        }
        emitInstruction(maker.or());
        return true;

      case REPEATED:
      case SECTION:
        return parseSection(type);

      default:
        throw new RuntimeException("Resolution failure: instruction type '" + type + "' has no text representation.");
    }
  }

  /**
   * Requires whitespace at the current position; reports an error when absent.
   * Note: matcher.consume() is called even on failure.
   */
  private boolean skipWhitespace() throws CodeSyntaxException {
    boolean result = matcher.whitespace();
    if (!result) {
      fail(error(WHITESPACE_EXPECTED).data(matcher.remainder()));
    }
    matcher.consume();
    return result;
  }

  /**
   * Lookup the keyword in the predicate table, raise an error if unknown.
   */
  private Predicate resolvePredicate(StringView keyword) throws CodeSyntaxException {
    Predicate predicate = predicateTable.get(keyword);
    if (predicate == null) {
      fail(error(PREDICATE_UNKNOWN).data(keyword.repr()));
      // Emit a dummy predicate with this name.
      return new BasePredicate(keyword.repr(), false) {
      };
    }
    return predicate;
  }

  /**
   * After we've resolved a predicate implementation, parse its optional arguments.
   * Returns null (after reporting) when required arguments are missing or invalid.
   */
  private Arguments parsePredicateArguments(Predicate predicate) throws CodeSyntaxException {
    StringView rawArgs = null;
    if (matcher.predicateArgs()) {
      rawArgs = matcher.consume();
    }
    Arguments args = null;
    if (rawArgs == null) {
      args = new Arguments();
      if (predicate.requiresArgs()) {
        fail(error(PREDICATE_NEEDS_ARGS).data(predicate));
        return null;
      }
    } else {
      args = new Arguments(rawArgs);
    }
    try {
      predicate.validateArgs(args);
    } catch (ArgumentsException e) {
      String identifier = predicate.identifier();
      fail(error(PREDICATE_ARGS_INVALID).name(identifier).data(e.getMessage()));
      return null;
    }
    return args;
  }

  /**
   * Parse boolean expression inside an IF instruction.
   *
   * NOTE: This does not currently enforce one type of boolean operator in the expression, so you
   * can mix OR with AND if you want, but there is no operator precedence or parenthesis so the
   * result may not be what was expected.
   */
  private boolean parseIfExpression() throws CodeSyntaxException {
    if (!matcher.whitespace()) {
      fail(error(WHITESPACE_EXPECTED).data(matcher.remainder()));
      return emitInvalid();
    }
    matcher.consume();

    // First, check if this is a predicate expression. If so, parse it.
    if (matcher.predicate()) {
      Predicate predicate = resolvePredicate(matcher.consume());
      Arguments args = parsePredicateArguments(predicate);
      if (args == null) {
        return emitInvalid();
      }
      try {
        emitInstruction(maker.ifpred(predicate, args));
      } catch (ArgumentsException e) {
        String identifier = predicate.identifier();
        fail(error(PREDICATE_ARGS_INVALID).name(identifier).data(e.getMessage()));
      }
      return true;
    }

    // Otherwise, this is an expression involving variable tests and operators.
    // If we find N variables, we'll need N-1 operators.
    List<String> vars = new ArrayList<>();
    List<Operator> ops = new ArrayList<>();
    int count = 0;
    while (matcher.variable()) {
      vars.add(matcher.consume().repr());
      if (matcher.whitespace()) {
        matcher.consume();
      }
      // Enforce the variable cap before reading the next operator.
      if (count == IF_VARIABLE_LIMIT) {
        fail(error(IF_TOO_MANY_VARS).limit(IF_VARIABLE_LIMIT));
        return emitInvalid();
      }
      count++;
      if (!matcher.operator()) {
        break;
      }
      Operator op = matcher.consume().repr().equals("&&") ? Operator.LOGICAL_AND : Operator.LOGICAL_OR;
      ops.add(op);
      if (matcher.whitespace()) {
        matcher.consume();
      }
    }
    if (!matcher.finished()) {
      fail(error(IF_EXPECTED_VAROP).data(matcher.remainder()));
      return emitInvalid();
    }
    if (vars.size() == 0) {
      fail(error(IF_EMPTY));
      return emitInvalid();
    }
    if (vars.size() != (ops.size() + 1)) {
      fail(error(IF_TOO_MANY_OPERATORS));
      return emitInvalid();
    }
    emitInstruction(maker.ifexpn(vars, ops));
    return true;
  }

  /**
   * Parse a SECTION or REPEATED instruction:
   *
   *   ".section" VARIABLE
   *   ".repeated section" VARIABLE
   *
   */
  private boolean parseSection(InstructionType type) throws CodeSyntaxException {
    if (!matcher.whitespace()) {
      fail(error(WHITESPACE_EXPECTED).data(matcher.remainder()));
      return emitInvalid();
    }
    matcher.consume();
    // REPEATED requires the extra "section" keyword plus whitespace.
    if (type == InstructionType.REPEATED) {
      if (!matcher.wordSection()) {
        fail(error(MISSING_SECTION_KEYWORD).data(matcher.remainder()));
        return emitInvalid();
      }
      matcher.consume();
      if (!matcher.whitespace()) {
        fail(error(WHITESPACE_EXPECTED).data(matcher.remainder()));
        return emitInvalid();
      }
      matcher.consume();
    }
    if (!matcher.variable()) {
      fail(error(VARIABLE_EXPECTED).data(matcher.remainder()));
      return emitInvalid();
    }
    StringView variable = matcher.consume();
    if (!matcher.finished()) {
      fail(error(EXTRA_CHARS).type(type).data(matcher.remainder()));
      return emitInvalid();
    }
    if (type == InstructionType.REPEATED) {
      emitInstruction(maker.repeated(variable.repr()));
    } else {
      emitInstruction(maker.section(variable.repr()));
    }
    return true;
  }

  /**
   * Parses a variable reference that can consist of one or more variable
names followed * by an optional list of formatters. Formatters can be chained so the output of one * formatter can be "piped" into the next. */ private boolean parseVariable() throws CodeSyntaxException { int start = matcher.matchStart(); Variables vars = parseVariables(); if (vars == null) { return false; } VariableInst instruction = maker.var(vars); List<FormatterCall> formatters = parseFormatters(instruction, start); if (formatters == null) { emitInstruction(instruction); } else if (!formatters.isEmpty()) { instruction.setFormatters(formatters); emitInstruction(instruction); } return true; } /** * Parse one or more variable references. Returns null if an error occurred. */ private Variables parseVariables() throws CodeSyntaxException { if (!matcher.variable()) { return null; } StringView token = matcher.consume(); Variables vars = new Variables(token.repr()); while (matcher.variablesDelimiter()) { matcher.consume(); if (matcher.finished()) { return null; } if (matcher.pipe() || !matcher.variable()) { return null; } vars.add(matcher.consume().repr()); } boolean matchedPipe = matcher.peek(0, '|'); // If we see JavaScript boolean or operator, skip. This would fail anyway // when we tried to parse the second '|' as a formatter name, so since // we've already matched one pipe, sanity-check here. if (matchedPipe && matcher.peek(1, '|')) { return null; } return vars; } private List<Binding> parseBindings() throws CodeSyntaxException { List<Binding> bindings = new ArrayList<>(); while (matcher.word()) { StringView name = matcher.consume(); if (!matcher.equalsign()) { break; } matcher.skip(); if (!matcher.variable()) { break; } Object[] reference = GeneralUtils.splitVariable(matcher.consume().repr()); Binding binding = new Binding(name.repr(), reference); bindings.add(binding); if (!matcher.whitespace()) { break; } matcher.skip(); } return bindings.isEmpty() ? 
null : bindings; } /** * Parse a formatter chain that may follow either a variable reference * or bind instruction. * * Returns: * null - no PIPE character was seen, so no formatters exist. * <empty List> - we saw a PIPE but encountered an error * <non-empty List> - we parsed a valid list of formatters. */ private List<FormatterCall> parseFormatters(Formattable formattable, int start) throws CodeSyntaxException { List<FormatterCall> formatters = null; while (matcher.pipe()) { matcher.consume(); if (!matcher.formatter()) { fail(error(FORMATTER_INVALID, matcher.pointer() - start, false).name(matcher.remainder())); emitInvalid(); return Collections.emptyList(); } StringView name = matcher.consume(); Formatter formatter = formatterTable.get(name); if (formatter == null) { fail(error(FORMATTER_UNKNOWN, matcher.matchStart() - start, false).name(name)); emitInvalid(); return Collections.emptyList(); } StringView rawArgs = null; if (matcher.arguments()) { rawArgs = matcher.consume(); } Arguments args = Constants.EMPTY_ARGUMENTS; if (formatter.requiresArgs() && rawArgs == null) { fail(error(FORMATTER_NEEDS_ARGS, matcher.matchStart() - start, false).data(formatter)); emitInvalid(); return Collections.emptyList(); } else { args = new Arguments(rawArgs); } try { formatter.validateArgs(args); } catch (ArgumentsException e) { String identifier = formatter.identifier(); fail(error(FORMATTER_ARGS_INVALID, matcher.matchStart() - start, false) .name(identifier) .data(e.getMessage())); emitInvalid(); return Collections.emptyList(); } if (formatters == null) { formatters = new ArrayList<>(2); } formatters.add(new FormatterCall(formatter, args)); } // If the initial matcher.pipe() fails to enter the loop, this indicates an // unexpected character exists after the instruction. if (!matcher.finished()) { emitInvalid(); return Collections.emptyList(); } // If we parsed all the way to the ending meta character, we can return // a valid formatter list. 
return formatters; } /** * Initial state for the tokenizer machine, representing the outermost parse scope. * This machine is pretty simple as it only needs to track an alternating sequence * of text / instructions. */ private final State stateInitial = new State() { @Override public State transition() throws CodeSyntaxException { while (true) { char ch = getc(index); switch (ch) { case EOF_CHAR: // Input is finished. See if we have any text left to flush. if (save < length) { emitText(save, length); } // A bit niche, but indicate the line number where the EOF occurred. instLine = lineCounter; instOffset = index - lineIndex; emitInstruction(maker.eof()); return stateEOF; case NEWLINE_CHAR: // Keep track of which line we're currently on. lineCounter++; lineIndex = index + 1; break; case META_LEFT_CHAR: instLine = lineCounter; instOffset = index - lineIndex; // Peek ahead to see if this is a multiline comment. if ((getc(index + 1) == '#') && getc(index + 2) == '#') { // Flush any text before the comment. if (save < index) { emitText(save, index); } index += 3; return stateMultilineComment; } // Skip over duplicate META_LEFT characters until we find the last one // before the corresponding META_RIGHT. metaLeft = index; break; case META_RIGHT_CHAR: // We found the right-hand boundary of a potential instruction. if (metaLeft != -1) { // Flush the text leading up to META_RIGHT, then attempt to parse the instruction. if (save < metaLeft) { emitText(save, metaLeft); } if (!matchMeta(metaLeft, index + 1)) { // Nothing looked like a keyword or variable, so treat the sequence as plain text. emitText(metaLeft, index + 1); } metaLeft = -1; } else { // Not an instruction candidate. Treat the entire sequence up to META_RIGHT as plain text. emitText(save, index + 1); } // Set starting line of next instruction. textLine = lineCounter; textOffset = index + 1 - lineIndex; save = index + 1; break; default: break; } index++; } } }; /** * MULTILINE COMMENT state. {## ... 
##} */ private final State stateMultilineComment = new State() { @Override public State transition() throws CodeSyntaxException { int start = index; while (true) { char ch = getc(index); switch (ch) { case EOF_CHAR: emitInstruction(maker.mcomment(raw, start, index)); fail(error(SyntaxErrorType.EOF_IN_COMMENT, true)); return stateEOF; case NEWLINE_CHAR: lineCounter++; lineIndex = index + 1; break; case POUND_CHAR: // Look-ahead for ##} sequence to terminate the comment block. if (getc(index + 1) == POUND_CHAR && getc(index + 2) == META_RIGHT_CHAR) { emitInstruction(maker.mcomment(raw, start, index), false); // Skip over multi-line suffix. index += 3; save = index; // Return to outer state. return stateInitial; } break; default: break; } index++; } } }; /** * Terminal state when EOF on the input is reached. */ private final State stateEOF = new State() { @Override public State transition() throws CodeSyntaxException { throw new RuntimeException("Tokenizer should never try to transition from the EOF state. " + "This is either a bug in the state machine or perhaps a tokenizer instance was reused."); } }; interface State { State transition() throws CodeSyntaxException; } }
/* * $Id: TestBean.java,v 1.14 2004/07/10 06:39:38 johnsonr Exp $ */ /* * Copyright 2002-2004 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.beans; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.Map; import java.util.Set; import org.springframework.beans.factory.BeanFactory; import org.springframework.beans.factory.BeanFactoryAware; /** * Simple test bean used for testing bean factories, * AOP framework etc. 
* @author Rod Johnson * @since 15 April 2001 */ public class TestBean implements BeanFactoryAware, ITestBean, IOther, Comparable { private BeanFactory beanFactory; private boolean postProcessed; private String name; private int age; private ITestBean spouse; private String touchy; private String[] stringArray; private Date date = new Date(); private Float myFloat = new Float(0.0); private Collection friends = new LinkedList(); private Set someSet = new HashSet(); private Map someMap = new HashMap(); private INestedTestBean doctor = new NestedTestBean(); private INestedTestBean lawyer = new NestedTestBean(); private IndexedTestBean nestedIndexedBean; public TestBean() { } public TestBean(String name, int age) { this.name = name; this.age = age; } public void setBeanFactory(BeanFactory beanFactory) { this.beanFactory = beanFactory; } public BeanFactory getBeanFactory() { return beanFactory; } public void setPostProcessed(boolean postProcessed) { this.postProcessed = postProcessed; } public boolean isPostProcessed() { return postProcessed; } public String getName() { return name; } public void setName(String name) { this.name = name; } public int getAge() { return age; } public void setAge(int age) { this.age = age; } public ITestBean getSpouse() { return spouse; } public void setSpouse(ITestBean spouse) { this.spouse = spouse; } public String getTouchy() { return touchy; } public void setTouchy(String touchy) throws Exception { if (touchy.indexOf('.') != -1) { throw new Exception("Can't contain a ."); } if (touchy.indexOf(',') != -1) { throw new NumberFormatException("Number format exception: contains a ,"); } this.touchy = touchy; } public String[] getStringArray() { return stringArray; } public void setStringArray(String[] stringArray) { this.stringArray = stringArray; } public Date getDate() { return date; } public void setDate(Date date) { this.date = date; } public Float getMyFloat() { return myFloat; } public void setMyFloat(Float myFloat) { this.myFloat = 
myFloat; } public Collection getFriends() { return friends; } public void setFriends(Collection friends) { this.friends = friends; } public Set getSomeSet() { return someSet; } public void setSomeSet(Set someSet) { this.someSet = someSet; } public Map getSomeMap() { return someMap; } public void setSomeMap(Map someMap) { this.someMap = someMap; } public INestedTestBean getDoctor() { return doctor; } public INestedTestBean getLawyer() { return lawyer; } public void setDoctor(INestedTestBean bean) { doctor = bean; } public void setLawyer(INestedTestBean bean) { lawyer = bean; } public IndexedTestBean getNestedIndexedBean() { return nestedIndexedBean; } public void setNestedIndexedBean(IndexedTestBean nestedIndexedBean) { this.nestedIndexedBean = nestedIndexedBean; } /** * @see ITestBean#exceptional(Throwable) */ public void exceptional(Throwable t) throws Throwable { if (t != null) throw t; } /** * @see ITestBean#returnsThis() */ public Object returnsThis() { return this; } /** * @see IOther#absquatulate() */ public void absquatulate() { } public int haveBirthday() { return age++; } public boolean equals(Object other) { if (this == other) return true; if (other == null || !(other instanceof TestBean)) return false; TestBean tb2 = (TestBean) other; if (tb2.age != age) return false; if (name == null) return tb2.name == null; if (!tb2.name.equals(name)) return false; return true; } public int compareTo(Object other) { if (this.name != null && other instanceof TestBean) { return this.name.compareTo(((TestBean) other).getName()); } else { return 1; } } public String toString() { String s = "name=" + name + "; age=" + age + "; touchy=" + touchy; s += "; spouse={" + (spouse != null ? spouse.getName() : null) + "}"; return s; } }
/* * Copyright (c) 2007 Adobe Systems Incorporated * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
* */ package com.adobe.epubcheck.opf; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Hashtable; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Queue; import java.util.Set; import java.util.regex.Pattern; import com.adobe.epubcheck.api.EPUBLocation; import com.adobe.epubcheck.api.LocalizableReport; import com.adobe.epubcheck.api.Report; import com.adobe.epubcheck.messages.LocalizedMessages; import com.adobe.epubcheck.messages.MessageId; import com.adobe.epubcheck.ocf.OCFPackage; import com.adobe.epubcheck.util.EPUBVersion; import com.adobe.epubcheck.util.FeatureEnum; import com.adobe.epubcheck.util.PathUtil; import com.adobe.epubcheck.vocab.PackageVocabs; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.collect.Sets; public class XRefChecker { public static enum Type { GENERIC, FONT, HYPERLINK, LINK, IMAGE, OBJECT, STYLESHEET, AUDIO, VIDEO, SVG_PAINT, SVG_CLIP_PATH, SVG_SYMBOL, REGION_BASED_NAV, SEARCH_KEY, NAV_TOC_LINK, NAV_PAGELIST_LINK, OVERLAY_TEXT_LINK, PICTURE_SOURCE, PICTURE_SOURCE_FOREIGN; } private static class Reference { public final String source; public final int lineNumber; public final int columnNumber; public final String value; public final String refResource; public final String fragment; public final Type type; public Reference(String srcResource, int srcLineNumber, int srcColumnNumber, String value, String refResource, String fragment, Type type) { this.source = srcResource; this.lineNumber = srcLineNumber; this.columnNumber = srcColumnNumber; this.value = value; this.refResource = refResource; this.fragment = fragment; this.type = type; } } private static class Anchor { @SuppressWarnings("unused") public final String id; public final Type type; public final int position; public Anchor(String id, int position, Type type) { this.id = id; this.position = position; this.type 
= type; } } private static class Resource { public final OPFItem item; public final Hashtable<String, Anchor> anchors; public final boolean hasValidItemFallback; public final boolean hasValidImageFallback; Resource(OPFItem item, boolean hasValidItemFallback, boolean hasValidImageFallback) { this.item = item; this.hasValidItemFallback = hasValidItemFallback; this.hasValidImageFallback = hasValidImageFallback; this.anchors = new Hashtable<String, Anchor>(); } /** * Returns the position of the given ID in the document represented by this * resource. * * @return {@code -1} if the ID wasn't found in the document, or {@code 0} if * the given ID is {@code null} or an empty string, or the 1-based * position of the ID otherwise. */ public int getAnchorPosition(String id) { if (id == null || id.trim().isEmpty()) return 0; Anchor anchor = anchors.get(id); return (anchor != null) ? anchor.position : -1; } } private static final Pattern REGEX_SVG_VIEW = Pattern.compile("svgView\\(.*\\)"); private final Map<String, Resource> resources = new HashMap<String, Resource>(); private final HashSet<String> undeclared = new HashSet<String>(); private final List<Reference> references = new LinkedList<Reference>(); private final Map<String, String> bindings = new HashMap<String, String>(); private final Report report; private final OCFPackage ocf; private final EPUBVersion version; private final Locale locale; public XRefChecker(OCFPackage ocf, Report report, EPUBVersion version) { this.ocf = ocf; this.report = report; this.version = version; this.locale = (report instanceof LocalizableReport) ? ((LocalizableReport) report).getLocale() : Locale.ENGLISH; } public String getMimeType(String path) { return resources.get(path) != null ? 
resources.get(path).item.getMimeType() : null; } /** * Returns an {@link Optional} containing a boolean indicating whether the * resource at the given path has a valid item fallback, or * {@link Optional#absent()} if no resource has been registered for the given * path. */ public Optional<Boolean> hasValidFallback(String path) { return resources.get(path) != null ? Optional.of(resources.get(path).hasValidItemFallback) : Optional.<Boolean> absent(); } /** * Returns an {@link Optional} containing the Package Document item for the * given Publication Resource path, or {@link Optional#absent()} if no resource * has been registered for the given path. */ public Optional<OPFItem> getResource(String path) { return (path == null || !resources.containsKey(path)) ? Optional.<OPFItem> absent() : Optional.of(resources.get(path).item); } /** * Returns set (possibly multiple) types of refereences to the given resource * * @param path * the path to a publication resource * @return an immutable {@link EnumSet} containing the types of references to * {@code path}. */ public Set<Type> getTypes(String path) { LinkedList<Type> types = new LinkedList<>(); for (Reference reference : references) { if (Preconditions.checkNotNull(path).equals(reference.refResource)) { types.add(reference.type); } } return Sets.immutableEnumSet(types); } public Set<String> getBindingsMimeTypes() { return bindings.keySet(); } public String getBindingHandlerId(String mimeType) { return bindings.get(mimeType); } public void registerBinding(String mimeType, String handlerId) { bindings.put(mimeType, handlerId); } public void registerResource(OPFItem item, boolean hasValidItemFallback, boolean hasValidImageFallback) { // Note: Duplicate manifest items are already checked in OPFChecker. 
if (!resources.containsKey(item.getPath())) { resources.put(item.getPath(), new Resource(item, hasValidItemFallback, hasValidImageFallback)); } } public void registerAnchor(String path, int lineNumber, int columnNumber, String id, Type type) { Resource res = Preconditions.checkNotNull(resources.get(path)); // Note: duplicate IDs are checked in schematron if (!res.anchors.contains(id)) { res.anchors.put(id, new Anchor(id, res.anchors.size() + 1, type)); } } public void registerReference(String srcResource, int srcLineNumber, int srcColumnNumber, String ref, Type type) { if (ref.startsWith("data:")) { return; } // see http://code.google.com/p/epubcheck/issues/detail?id=190 // see http://code.google.com/p/epubcheck/issues/detail?id=261 int query = ref.indexOf('?'); if (query >= 0 && !PathUtil.isRemote(ref)) { ref = ref.substring(0, query).trim(); } String refResource = PathUtil.removeFragment(ref); String refFragment = PathUtil.getFragment(ref); report.info(srcResource, FeatureEnum.RESOURCE, refResource); references.add(new Reference(srcResource, srcLineNumber, srcColumnNumber, ref, refResource, refFragment, type)); } public void checkReferences() { // if (checkReference(reference)) checkReferenceSubtypes(reference); Queue<Reference> tocLinks = new LinkedList<>(); Queue<Reference> pageListLinks = new LinkedList<>(); Queue<Reference> overlayLinks = new LinkedList<>(); for (Reference reference : references) { switch (reference.type) { case REGION_BASED_NAV: checkRegionBasedNav(reference); break; case NAV_TOC_LINK: tocLinks.add(reference); break; case NAV_PAGELIST_LINK: pageListLinks.add(reference); break; case OVERLAY_TEXT_LINK: overlayLinks.add(reference); break; default: checkReference(reference); break; } } checkReadingOrder(tocLinks, -1, -1); checkReadingOrder(pageListLinks, -1, -1); checkReadingOrder(overlayLinks, -1, -1); } private void checkReference(Reference ref) { Resource res = resources.get(ref.refResource); Resource host = resources.get(ref.source); // 
Check remote resources if (PathUtil.isRemote(ref.refResource) // remote links and hyperlinks are not Publication Resources && !EnumSet.of(Type.LINK, Type.HYPERLINK).contains(ref.type) // spine items are checked in OPFChecker30 && !(version == EPUBVersion.VERSION_3 && res != null && res.item.isInSpine()) // audio, video, and fonts can be remote resources in EPUB 3 && !(version == EPUBVersion.VERSION_3 && EnumSet.of(Type.AUDIO, Type.VIDEO, Type.FONT).contains(ref.type))) { report.message(MessageId.RSC_006, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber, ref.refResource)); return; } // Check undeclared resources if (res == null) { // Report references to missing local resources if (!ocf.hasEntry(ref.refResource) && !PathUtil.isRemote(ref.refResource)) { // only as a WARNING for 'link' references in EPUB 3 if (version == EPUBVersion.VERSION_3 && ref.type == Type.LINK) { report.message(MessageId.RSC_007w, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber, ref.refResource), ref.refResource); } // by default as an ERROR else { report.message(MessageId.RSC_007, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber, ref.refResource), ref.refResource); } } // Report undeclared Publication Resources (once) else if (!undeclared.contains(ref.refResource) // links and remote hyperlinks are not Publication Resources && !(ref.type == Type.LINK || PathUtil.isRemote(ref.refResource) && ref.type == Type.HYPERLINK)) { undeclared.add(ref.refResource); report.message(MessageId.RSC_008, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber, ref.refResource), ref.refResource); } return; } // Type-specific checks switch (ref.type) { case HYPERLINK: // if mimeType is null, we should have reported an error already if (!OPFChecker.isBlessedItemType(res.item.getMimeType(), version) && !OPFChecker.isDeprecatedBlessedItemType(res.item.getMimeType()) && !res.hasValidItemFallback) { report.message(MessageId.RSC_010, 
EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber, ref.refResource + ((ref.fragment != null) ? '#' + ref.fragment : ""))); return; } if (/* !res.mimeType.equals("font/opentype") && */!res.item.isInSpine()) { report.message(MessageId.RSC_011, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber, ref.refResource + ((ref.fragment != null) ? '#' + ref.fragment : ""))); return; } break; case IMAGE: case PICTURE_SOURCE: case PICTURE_SOURCE_FOREIGN: if (ref.fragment != null && !res.item.getMimeType().equals("image/svg+xml")) { report.message(MessageId.RSC_009, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber, ref.refResource + "#" + ref.fragment)); return; } // if mimeType is null, we should have reported an error already if (!OPFChecker.isBlessedImageType(res.item.getMimeType())) { if (version == EPUBVersion.VERSION_3 && ref.type == Type.PICTURE_SOURCE) { report.message(MessageId.MED_007, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber), ref.refResource, res.item.getMimeType()); return; } else if (ref.type == Type.IMAGE && !res.hasValidImageFallback) { report.message(MessageId.MED_003, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber), ref.refResource, res.item.getMimeType()); } } break; case SEARCH_KEY: // TODO update when we support EPUB CFI if ((ref.fragment == null || !ref.fragment.startsWith("epubcfi(")) && !res.item.isInSpine()) { report.message(MessageId.RSC_021, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber), ref.refResource); return; } break; case STYLESHEET: if (ref.fragment != null) { report.message(MessageId.RSC_013, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber, ref.refResource + "#" + ref.fragment)); return; } // if mimeType is null, we should have reported an error already // Implementations are allowed to process any stylesheet // language they desire; so this is an // error only if no fallback is available. 
// Since the presence of a 'text/css' stylesheet link can be considered // a valid "built-in" fallback for a non-standard stylesheet (e.g. // XPGT), the fallback chain test is performed in OPSHandler instead. // See also: // https://github.com/IDPF/epubcheck/issues/244 // https://github.com/IDPF/epubcheck/issues/271 // https://github.com/IDPF/epubcheck/issues/541 break; case SVG_CLIP_PATH: case SVG_PAINT: case SVG_SYMBOL: if (ref.fragment == null) { report.message(MessageId.RSC_015, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber, ref.refResource)); return; } break; default: break; } // Fragment integrity checks if (ref.fragment != null) { // EPUB CFI if (ref.fragment.startsWith("epubcfi(")) { // FIXME epubcfi currently not supported (see issue 150). return; } // Media fragments in Data Navigation Documents else if (ref.fragment.contains("=") && host != null && host.item.getProperties() .contains(PackageVocabs.ITEM_VOCAB.get(PackageVocabs.ITEM_PROPERTIES.DATA_NAV))) { // Ignore, return; } // SVG view fragments are ignored else if (res.item.getMimeType().equals("image/svg+xml") && REGEX_SVG_VIEW.matcher(ref.fragment).matches()) { return; } // Fragment Identifier (by default) else if (!PathUtil.isRemote(ref.refResource)) { Anchor anchor = res.anchors.get(ref.fragment); if (anchor == null) { report.message(MessageId.RSC_012, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber, ref.refResource + "#" + ref.fragment)); return; } switch (ref.type) { case SVG_PAINT: case SVG_CLIP_PATH: if (anchor.type != ref.type) { report.message(MessageId.RSC_014, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber, ref.refResource + "#" + ref.fragment)); return; } break; case SVG_SYMBOL: case HYPERLINK: if (anchor.type != ref.type && anchor.type != Type.GENERIC) { report.message(MessageId.RSC_014, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber, ref.refResource + "#" + ref.fragment)); return; } break; default: break; } } } } 
private void checkRegionBasedNav(Reference ref) { Preconditions.checkArgument(ref.type == Type.REGION_BASED_NAV); Resource res = resources.get(ref.refResource); if (!res.item.isFixedLayout()) { report.message(MessageId.NAV_009, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber)); } } private void checkReadingOrder(Queue<Reference> references, int lastSpinePosition, int lastAnchorPosition) { // de-queue Reference ref = references.poll(); if (ref == null) return; Preconditions .checkArgument(ref.type == Type.NAV_PAGELIST_LINK || ref.type == Type.NAV_TOC_LINK || ref.type == Type.OVERLAY_TEXT_LINK); Resource res = resources.get(ref.refResource); // abort early if the link target is not a spine item (checked elsewhere) if (res == null || !res.item.isInSpine()) return; // check that the link is in spine order int targetSpinePosition = res.item.getSpinePosition(); if (targetSpinePosition < lastSpinePosition) { String orderContext = LocalizedMessages.getInstance(locale).getSuggestion(MessageId.NAV_011, "spine"); if (ref.type == Type.OVERLAY_TEXT_LINK) { report.message(MessageId.MED_015, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber), ref.value, orderContext); } else { report.message(MessageId.NAV_011, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber), (ref.type == Type.NAV_TOC_LINK) ? 
"toc" : "page-list", ref.value, orderContext); report.message(MessageId.INF_001, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber), "https://github.com/w3c/publ-epub-revision/issues/1283"); } lastSpinePosition = targetSpinePosition; lastAnchorPosition = -1; } else { // if new spine item, reset last positions if (targetSpinePosition > lastSpinePosition) { lastSpinePosition = targetSpinePosition; lastAnchorPosition = -1; } // check that the fragment is in document order int targetAnchorPosition = res.getAnchorPosition(ref.fragment); if (targetAnchorPosition < lastAnchorPosition) { String orderContext = LocalizedMessages.getInstance(locale).getSuggestion(MessageId.NAV_011, "document"); if (ref.type == Type.OVERLAY_TEXT_LINK) { report.message(MessageId.MED_015, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber), ref.value, orderContext); } else { report.message(MessageId.NAV_011, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber), (ref.type == Type.NAV_TOC_LINK) ? "toc" : "page-list", ref.value, orderContext); report.message(MessageId.INF_001, EPUBLocation.create(ref.source, ref.lineNumber, ref.columnNumber), "https://github.com/w3c/publ-epub-revision/issues/1283"); } } lastAnchorPosition = targetAnchorPosition; } checkReadingOrder(references, lastSpinePosition, lastAnchorPosition); } }
/* Copyright 2014 OPM.gov Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package gov.opm.scrd.entities.application; import gov.opm.scrd.entities.common.IdentifiableEntity; import gov.opm.scrd.entities.lookup.AccountStatus; import gov.opm.scrd.entities.lookup.ApplicationDesignation; import gov.opm.scrd.entities.lookup.ApprovalStatus; import gov.opm.scrd.entities.lookup.PaymentAppliance; import gov.opm.scrd.entities.lookup.PaymentStatus; import gov.opm.scrd.entities.lookup.PaymentType; import java.math.BigDecimal; import java.util.Date; /** * <p> * This is the class representing the performed payment of the account. This is a consolidated persistence entity for * all the payments used in the application. There can be ordinary payment or specific payment view. The payment view * information is set in @see PaymentType. Additional payment views are @see SuspendedPayment, @see PaymentMove, @see * InterestAdjustment, @see PendingPayment. * </p> * * <p> * <em>Changes in 1.1 (OPM - Data Services - Account and Payment Services Assembly 1.0):</em> * <ol> * <li>Added field approvalReason.</li> * <li>Changed claimNumber to String type.</li> * </ol> * </p> * * <p> * <em>Changes in 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0):</em> * <ul> * <li>Added fields: govRefund, disapprove, historyPayment, resolvedSuspense, userInserted, postFlag, orderCode, * statusCode</li> * </ul> * </p> * * <p> * <strong>Thread Safety: </strong> This class is mutable and not thread safe. 
* </p> * * Changed in OPM - Frontend - Payments Module Assembly * Add the PaymentAppliance field. * * @author faeton, sparemax, woodjhon * @version 1.2 */ public class Payment extends IdentifiableEntity { /** * <p> * Represents the batch number of payment. It is managed with a getter and setter. It may have any value. It is * fully mutable. * </p> */ private String batchNumber; /** * <p> * Represents the block number of payment. It is managed with a getter and setter. It may have any value. It is * fully mutable. * </p> */ private String blockNumber; /** * <p> * Represents the sequence number of payment. It is managed with a getter and setter. It may have any value. It is * fully mutable. * </p> */ private String sequenceNumber; /** * <p> * Represents the status of payment. It is managed with a getter and setter. It may have any value. It is fully * mutable. * </p> */ private PaymentStatus paymentStatus; /** * <p> * Represents the claim number of payment. It is managed with a getter and setter. It may have any value. It is * fully mutable. * </p> */ private String claimNumber; /** * <p> * Represents the birth date of account holder with this payment. It is managed with a getter and setter. It may * have any value. It is fully mutable. * </p> */ private Date accountHolderBirthdate; /** * <p> * Represents the deposit date of payment. It is managed with a getter and setter. It may have any value. It is * fully mutable. * </p> */ private Date depositDate; /** * <p> * Represents the amount of payment. It is managed with a getter and setter. It may have any value. It is fully * mutable. * </p> */ private BigDecimal amount; /** * <p> * Represents the social security number of account holder with this payment. It is managed with a getter and * setter. It may have any value. It is fully mutable. * </p> */ private String ssn; /** * <p> * Represents the claimant of the payment. It is managed with a getter and setter. It may have any value. It is * fully mutable. 
* </p> */ private String claimant; /** * <p> * Represents the birth date of claimant of the payment. It is managed with a getter and setter. It may have any * value. It is fully mutable. * </p> */ private Date claimantBirthdate; /** * <p> * Represents the id of import operation of the payment. It is managed with a getter and setter. It may have any * value. It is fully mutable. * </p> */ private String importId; /** * <p> * Represents the sequence index of the payment. It is managed with a getter and setter. It may have any value. It * is fully mutable. * </p> */ private int sequence; /** * <p> * Represents the transaction date of payment. It is managed with a getter and setter. It may have any value. It is * fully mutable. * </p> */ private Date transactionDate; /** * <p> * Represents the status date of payment. It is managed with a getter and setter. It may have any value. It is fully * mutable. * </p> */ private Date statusDate; /** * <p> * Represents the application designation of the payment. It is managed with a getter and setter. It may have any * value. It is fully mutable. * </p> */ private ApplicationDesignation applyTo; /** * <p> * Represents the flag specifying whether the payment should be applied to GL file. It is managed with a getter and * setter. It may have any value. It is fully mutable. * </p> */ private boolean applyToGL; /** * <p> * Represents the note of the payment. It is managed with a getter and setter. It may have any value. It is fully * mutable. * </p> */ private String note; /** * <p> * Represents the transaction key of the payment. It is managed with a getter and setter. It may have any value. It * is fully mutable. * </p> */ private String transactionKey; /** * <p> * Represents the flag specifying whether this payment is ACH. It is managed with a getter and setter. It may have * any value. It is fully mutable. * </p> */ private boolean ach; /** * <p> * Represents the balance of the payment. It is managed with a getter and setter. 
It may have any value. It is fully * mutable. * </p> */ private BigDecimal accountBalance; /** * <p> * Represents the account status of the payment. It is managed with a getter and setter. It may have any value. It * is fully mutable. * </p> */ private AccountStatus accountStatus; /** * <p> * Represents the claim number of master account of the payment. It is managed with a getter and setter. It may have * any value. It is fully mutable. * </p> */ private String masterClaimNumber; /** * <p> * Represents the birth date of claimant of the payment. It is managed with a getter and setter. It may have any * value. It is fully mutable. * </p> */ private Date masterClaimantBirthdate; /** * <p> * Represents the status of master account of the payment. It is managed with a getter and setter. It may have any * value. It is fully mutable. * </p> */ private AccountStatus masterAccountStatus; /** * <p> * Represents the balance of master account of the payment. It is managed with a getter and setter. It may have any * value. It is fully mutable. * </p> */ private BigDecimal masterAccountBalance; /** * <p> * Represents the balance of master account of the payment. It is managed with a getter and setter. It may have any * value. It is fully mutable. * </p> */ private Long masterAccountId; /** * <p> * Represents the predeposit amount of the payment. It is managed with a getter and setter. It may have any value. * It is fully mutable. * </p> */ private BigDecimal preDepositAmount; /** * <p> * Represents the preredeposit amount of the payment. It is managed with a getter and setter. It may have any value. * It is fully mutable. * </p> */ private BigDecimal preRedepositAmount; /** * <p> * Represents the post deposit amount of the payment. It is managed with a getter and setter. It may have any value. * It is fully mutable. * </p> */ private BigDecimal postDepositAmount; /** * <p> * Represents the post redeposit amount of the payment. It is managed with a getter and setter. 
It may have any * value. It is fully mutable. * </p> */ private BigDecimal postRedepositAmount; /** * <p> * Represents the user who approved the payment. It is managed with a getter and setter. It may have any value. It * is fully mutable. * </p> */ private String approvalUser; /** * <p> * Represents the approval status of the payment. It is managed with a getter and setter. It may have any value. It * is fully mutable. * </p> */ private ApprovalStatus approvalStatus; /** * <p> * Represents the approval reason of the payment. It is managed with a getter and setter. It may have any value. It * is fully mutable. * </p> */ private String approvalReason; /** * <p> * Represents the type of the payment. It is managed with a getter and setter. It may have any value. It is fully * mutable. * </p> */ private PaymentType paymentType; /** * <p> * Represents the id of account of the payment. It is managed with a getter and setter. It may have any value. It is * fully mutable. * </p> */ private Long accountId; /** * <p> * Represents the flag specifying whether payment is government refund. It is managed with a getter and setter. It * may have any value. It is fully mutable. * </p> * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ private Boolean govRefund; /** * <p> * Represents the flag specifying whether payment is disapproved. It is managed with a getter and setter. It may * have any value. It is fully mutable. * </p> * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ private Boolean disapprove; /** * <p> * Represents the flag specifying whether payment is history. It is managed with a getter and setter. It may have * any value. It is fully mutable. * </p> * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ private Boolean historyPayment; /** * <p> * Represents the flag specifying whether payment has resolved suspense. It is managed with a getter and setter. It * may have any value. 
It is fully mutable. * </p> * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ private Boolean resolvedSuspense; /** * <p> * Represents the flag specifying whether payment is user inserted. It is managed with a getter and setter. It may * have any value. It is fully mutable. * </p> * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ private Boolean userInserted; /** * <p> * Represents the flag specifying whether payment is post. It is managed with a getter and setter. It may have any * value. It is fully mutable. * </p> * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ private Boolean postFlag; /** * <p> * Represents the order code of payment. It is managed with a getter and setter. It may have any value. It is fully * mutable. * </p> * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ private PaymentsAppliedOrderCode orderCode; /** * <p> * Represents the status code of payment. It is managed with a getter and setter. It may have any value. It is fully * mutable. * </p> * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ private PayTransStatusCode statusCode; /** * The payment appliance field. * */ private PaymentAppliance paymentAppliance; /** * Creates an instance of Payment. */ public Payment() { // Empty } /** * Get the payment appliance. * * @return the payment appliance */ public PaymentAppliance getPaymentAppliance() { return this.paymentAppliance; } /** * Set the payment appliance. * * @param paymentAppliance to set. */ public void setPaymentAppliance(PaymentAppliance paymentAppliance) { this.paymentAppliance = paymentAppliance; } /** * Gets the batch number of payment. * * @return the batch number of payment. */ public String getBatchNumber() { return batchNumber; } /** * Sets the batch number of payment. * * @param batchNumber * the batch number of payment. 
*/ public void setBatchNumber(String batchNumber) { this.batchNumber = batchNumber; } /** * Gets the block number of payment. * * @return the block number of payment. */ public String getBlockNumber() { return blockNumber; } /** * Sets the block number of payment. * * @param blockNumber * the block number of payment. */ public void setBlockNumber(String blockNumber) { this.blockNumber = blockNumber; } /** * Gets the sequence number of payment. * * @return the sequence number of payment. */ public String getSequenceNumber() { return sequenceNumber; } /** * Sets the sequence number of payment. * * @param sequenceNumber * the sequence number of payment. */ public void setSequenceNumber(String sequenceNumber) { this.sequenceNumber = sequenceNumber; } /** * Gets the status of payment. * * @return the status of payment. */ public PaymentStatus getPaymentStatus() { return paymentStatus; } /** * Sets the status of payment. * * @param paymentStatus * the status of payment. */ public void setPaymentStatus(PaymentStatus paymentStatus) { this.paymentStatus = paymentStatus; } /** * Gets the claim number of payment. * * @return the claim number of payment. */ public String getClaimNumber() { return claimNumber; } /** * Sets the claim number of payment. * * @param claimNumber * the claim number of payment. */ public void setClaimNumber(String claimNumber) { this.claimNumber = claimNumber; } /** * Gets the birth date of account holder with this payment. * * @return the birth date of account holder with this payment. */ public Date getAccountHolderBirthdate() { return accountHolderBirthdate; } /** * Sets the birth date of account holder with this payment. * * @param accountHolderBirthdate * the birth date of account holder with this payment. */ public void setAccountHolderBirthdate(Date accountHolderBirthdate) { this.accountHolderBirthdate = accountHolderBirthdate; } /** * Gets the deposit date of payment. * * @return the deposit date of payment. 
*/ public Date getDepositDate() { return depositDate; } /** * Sets the deposit date of payment. * * @param depositDate * the deposit date of payment. */ public void setDepositDate(Date depositDate) { this.depositDate = depositDate; } /** * Gets the amount of payment. * * @return the amount of payment. */ public BigDecimal getAmount() { return amount; } /** * Sets the amount of payment. * * @param amount * the amount of payment. */ public void setAmount(BigDecimal amount) { this.amount = amount; } /** * Gets the social security number of account holder with this payment. * * @return the social security number of account holder with this payment. */ public String getSsn() { return ssn; } /** * Sets the social security number of account holder with this payment. * * @param ssn * the social security number of account holder with this payment. */ public void setSsn(String ssn) { this.ssn = ssn; } /** * Gets the claimant of the payment. * * @return the claimant of the payment. */ public String getClaimant() { return claimant; } /** * Sets the claimant of the payment. * * @param claimant * the claimant of the payment. */ public void setClaimant(String claimant) { this.claimant = claimant; } /** * Gets the birth date of claimant of the payment. * * @return the birth date of claimant of the payment. */ public Date getClaimantBirthdate() { return claimantBirthdate; } /** * Sets the birth date of claimant of the payment. * * @param claimantBirthdate * the birth date of claimant of the payment. */ public void setClaimantBirthdate(Date claimantBirthdate) { this.claimantBirthdate = claimantBirthdate; } /** * Gets the id of import operation of the payment. * * @return the id of import operation of the payment. */ public String getImportId() { return importId; } /** * Sets the id of import operation of the payment. * * @param importId * the id of import operation of the payment. 
*/ public void setImportId(String importId) { this.importId = importId; } /** * Gets the sequence index of the payment. * * @return the sequence index of the payment. */ public int getSequence() { return sequence; } /** * Sets the sequence index of the payment. * * @param sequence * the sequence index of the payment. */ public void setSequence(int sequence) { this.sequence = sequence; } /** * Gets the transaction date of payment. * * @return the transaction date of payment. */ public Date getTransactionDate() { return transactionDate; } /** * Sets the transaction date of payment. * * @param transactionDate * the transaction date of payment. */ public void setTransactionDate(Date transactionDate) { this.transactionDate = transactionDate; } /** * Gets the status date of payment. * * @return the status date of payment. */ public Date getStatusDate() { return statusDate; } /** * Sets the status date of payment. * * @param statusDate * the status date of payment. */ public void setStatusDate(Date statusDate) { this.statusDate = statusDate; } /** * Gets the application designation of the payment. * * @return the application designation of the payment. */ public ApplicationDesignation getApplyTo() { return applyTo; } /** * Sets the application designation of the payment. * * @param applyTo * the application designation of the payment. */ public void setApplyTo(ApplicationDesignation applyTo) { this.applyTo = applyTo; } /** * Gets the flag specifying whether the payment should be applied to GL file. * * @return the flag specifying whether the payment should be applied to GL file. */ public boolean isApplyToGL() { return applyToGL; } /** * Sets the flag specifying whether the payment should be applied to GL file. * * @param applyToGL * the flag specifying whether the payment should be applied to GL file. */ public void setApplyToGL(boolean applyToGL) { this.applyToGL = applyToGL; } /** * Gets the note of the payment. * * @return the note of the payment. 
*/ public String getNote() { return note; } /** * Sets the note of the payment. * * @param note * the note of the payment. */ public void setNote(String note) { this.note = note; } /** * Gets the transaction key of the payment. * * @return the transaction key of the payment. */ public String getTransactionKey() { return transactionKey; } /** * Sets the transaction key of the payment. * * @param transactionKey * the transaction key of the payment. */ public void setTransactionKey(String transactionKey) { this.transactionKey = transactionKey; } /** * Gets the flag specifying whether this payment is ACH. * * @return the flag specifying whether this payment is ACH. */ public boolean isAch() { return ach; } /** * Sets the flag specifying whether this payment is ACH. * * @param ach * the flag specifying whether this payment is ACH. */ public void setAch(boolean ach) { this.ach = ach; } /** * Gets the balance of the payment. * * @return the balance of the payment. */ public BigDecimal getAccountBalance() { return accountBalance; } /** * Sets the balance of the payment. * * @param accountBalance * the balance of the payment. */ public void setAccountBalance(BigDecimal accountBalance) { this.accountBalance = accountBalance; } /** * Gets the account status of the payment. * * @return the account status of the payment. */ public AccountStatus getAccountStatus() { return accountStatus; } /** * Sets the account status of the payment. * * @param accountStatus * the account status of the payment. */ public void setAccountStatus(AccountStatus accountStatus) { this.accountStatus = accountStatus; } /** * Gets the claim number of master account of the payment. * * @return the claim number of master account of the payment. */ public String getMasterClaimNumber() { return masterClaimNumber; } /** * Sets the claim number of master account of the payment. * * @param masterClaimNumber * the claim number of master account of the payment. 
*/ public void setMasterClaimNumber(String masterClaimNumber) { this.masterClaimNumber = masterClaimNumber; } /** * Gets the birth date of claimant of the payment. * * @return the birth date of claimant of the payment. */ public Date getMasterClaimantBirthdate() { return masterClaimantBirthdate; } /** * Sets the birth date of claimant of the payment. * * @param masterClaimantBirthdate * the birth date of claimant of the payment. */ public void setMasterClaimantBirthdate(Date masterClaimantBirthdate) { this.masterClaimantBirthdate = masterClaimantBirthdate; } /** * Gets the status of master account of the payment. * * @return the status of master account of the payment. */ public AccountStatus getMasterAccountStatus() { return masterAccountStatus; } /** * Sets the status of master account of the payment. * * @param masterAccountStatus * the status of master account of the payment. */ public void setMasterAccountStatus(AccountStatus masterAccountStatus) { this.masterAccountStatus = masterAccountStatus; } /** * Gets the balance of master account of the payment. * * @return the balance of master account of the payment. */ public BigDecimal getMasterAccountBalance() { return masterAccountBalance; } /** * Sets the balance of master account of the payment. * * @param masterAccountBalance * the balance of master account of the payment. */ public void setMasterAccountBalance(BigDecimal masterAccountBalance) { this.masterAccountBalance = masterAccountBalance; } /** * Gets the balance of master account of the payment. * * @return the balance of master account of the payment. */ public Long getMasterAccountId() { return masterAccountId; } /** * Sets the balance of master account of the payment. * * @param masterAccountId * the balance of master account of the payment. */ public void setMasterAccountId(Long masterAccountId) { this.masterAccountId = masterAccountId; } /** * Gets the predeposit amount of the payment. * * @return the predeposit amount of the payment. 
*/ public BigDecimal getPreDepositAmount() { return preDepositAmount; } /** * Sets the predeposit amount of the payment. * * @param preDepositAmount * the predeposit amount of the payment. */ public void setPreDepositAmount(BigDecimal preDepositAmount) { this.preDepositAmount = preDepositAmount; } /** * Gets the preredeposit amount of the payment. * * @return the preredeposit amount of the payment. */ public BigDecimal getPreRedepositAmount() { return preRedepositAmount; } /** * Sets the preredeposit amount of the payment. * * @param preRedepositAmount * the preredeposit amount of the payment. */ public void setPreRedepositAmount(BigDecimal preRedepositAmount) { this.preRedepositAmount = preRedepositAmount; } /** * Gets the post deposit amount of the payment. * * @return the post deposit amount of the payment. */ public BigDecimal getPostDepositAmount() { return postDepositAmount; } /** * Sets the post deposit amount of the payment. * * @param postDepositAmount * the post deposit amount of the payment. */ public void setPostDepositAmount(BigDecimal postDepositAmount) { this.postDepositAmount = postDepositAmount; } /** * Gets the post redeposit amount of the payment. * * @return the post redeposit amount of the payment. */ public BigDecimal getPostRedepositAmount() { return postRedepositAmount; } /** * Sets the post redeposit amount of the payment. * * @param postRedepositAmount * the post redeposit amount of the payment. */ public void setPostRedepositAmount(BigDecimal postRedepositAmount) { this.postRedepositAmount = postRedepositAmount; } /** * Gets the user who approved the payment. * * @return the user who approved the payment. */ public String getApprovalUser() { return approvalUser; } /** * Sets the user who approved the payment. * * @param approvalUser * the user who approved the payment. */ public void setApprovalUser(String approvalUser) { this.approvalUser = approvalUser; } /** * Gets the approval status of the payment. 
* * @return the approval status of the payment. */ public ApprovalStatus getApprovalStatus() { return approvalStatus; } /** * Sets the approval status of the payment. * * @param approvalStatus * the approval status of the payment. */ public void setApprovalStatus(ApprovalStatus approvalStatus) { this.approvalStatus = approvalStatus; } /** * Gets the approval reason of the payment. * * @return the approval reason of the payment. */ public String getApprovalReason() { return approvalReason; } /** * Sets the approval reason of the payment. * * @param approvalReason * the approval reason of the payment. */ public void setApprovalReason(String approvalReason) { this.approvalReason = approvalReason; } /** * Gets the type of the payment. * * @return the type of the payment. */ public PaymentType getPaymentType() { return paymentType; } /** * Sets the type of the payment. * * @param paymentType * the type of the payment. */ public void setPaymentType(PaymentType paymentType) { this.paymentType = paymentType; } /** * Gets the id of account of the payment. * * @return the id of account of the payment. */ public Long getAccountId() { return accountId; } /** * Sets the id of account of the payment. * * @param accountId * the id of account of the payment. */ public void setAccountId(Long accountId) { this.accountId = accountId; } /** * Gets the flag specifying whether payment is government refund. * * @return the flag specifying whether payment is government refund. * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ public Boolean getGovRefund() { return govRefund; } /** * Sets the flag specifying whether payment is government refund. * * @param govRefund * the flag specifying whether payment is government refund. * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ public void setGovRefund(Boolean govRefund) { this.govRefund = govRefund; } /** * Gets the flag specifying whether payment is disapproved. 
* * @return the flag specifying whether payment is disapproved. * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ public Boolean getDisapprove() { return disapprove; } /** * Sets the flag specifying whether payment is disapproved. * * @param disapprove * the flag specifying whether payment is disapproved. * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ public void setDisapprove(Boolean disapprove) { this.disapprove = disapprove; } /** * Gets the flag specifying whether payment is history. * * @return the flag specifying whether payment is history. * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ public Boolean getHistoryPayment() { return historyPayment; } /** * Sets the flag specifying whether payment is history. * * @param historyPayment * the flag specifying whether payment is history. * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ public void setHistoryPayment(Boolean historyPayment) { this.historyPayment = historyPayment; } /** * Gets the flag specifying whether payment has resolved suspense. * * @return the flag specifying whether payment has resolved suspense. * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ public Boolean getResolvedSuspense() { return resolvedSuspense; } /** * Sets the flag specifying whether payment has resolved suspense. * * @param resolvedSuspense * the flag specifying whether payment has resolved suspense. * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ public void setResolvedSuspense(Boolean resolvedSuspense) { this.resolvedSuspense = resolvedSuspense; } /** * Gets the flag specifying whether payment is user inserted. * * @return the flag specifying whether payment is user inserted. 
* * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ public Boolean getUserInserted() { return userInserted; } /** * Sets the flag specifying whether payment is user inserted. * * @param userInserted * the flag specifying whether payment is user inserted. * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ public void setUserInserted(Boolean userInserted) { this.userInserted = userInserted; } /** * Gets the flag specifying whether payment is post. * * @return the flag specifying whether payment is post. * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ public Boolean getPostFlag() { return postFlag; } /** * Sets the flag specifying whether payment is post. * * @param postFlag * the flag specifying whether payment is post. * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ public void setPostFlag(Boolean postFlag) { this.postFlag = postFlag; } /** * Gets the order code of payment. * * @return the order code of payment. * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ public PaymentsAppliedOrderCode getOrderCode() { return orderCode; } /** * Sets the order code of payment. * * @param orderCode * the order code of payment. * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ public void setOrderCode(PaymentsAppliedOrderCode orderCode) { this.orderCode = orderCode; } /** * Gets the status code of payment. * * @return the status code of payment. * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ public PayTransStatusCode getStatusCode() { return statusCode; } /** * Sets the status code of payment. * * @param statusCode * the status code of payment. * * @since 1.2 (OPM - Data Migration - Entities Update Module Assembly 1.0) */ public void setStatusCode(PayTransStatusCode statusCode) { this.statusCode = statusCode; } }
// Copyright 2014 Google Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.bazel.repository; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Ascii; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.hash.Hasher; import com.google.common.hash.Hashing; import com.google.devtools.build.lib.analysis.RuleDefinition; import com.google.devtools.build.lib.bazel.rules.workspace.MavenJarRule; import com.google.devtools.build.lib.cmdline.PackageIdentifier.RepositoryName; import com.google.devtools.build.lib.events.Location; import com.google.devtools.build.lib.packages.AggregatingAttributeMapper; import com.google.devtools.build.lib.packages.AttributeMap; import com.google.devtools.build.lib.packages.Rule; import com.google.devtools.build.lib.skyframe.FileValue; import com.google.devtools.build.lib.skyframe.RepositoryValue; import com.google.devtools.build.lib.syntax.EvalException; import com.google.devtools.build.lib.syntax.Type; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.skyframe.SkyFunctionException.Transience; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import org.eclipse.aether.RepositorySystem; import org.eclipse.aether.RepositorySystemSession; import 
org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.repository.RemoteRepository;
import org.eclipse.aether.resolution.ArtifactRequest;
import org.eclipse.aether.resolution.ArtifactResolutionException;
import org.eclipse.aether.resolution.ArtifactResult;

import java.io.IOException;

import javax.annotation.Nullable;

/**
 * Implementation of maven_jar: resolves a Maven artifact (via Aether) into the external
 * repository directory and exposes it as a repository value.
 */
public class MavenJarFunction extends HttpArchiveFunction {

  private static final String DEFAULT_SERVER = "default";

  /**
   * Computes the repository value for a maven_jar rule.
   *
   * <p>Resolves the repository URL from either the rule's 'repository' attribute or a named
   * maven server (the two are mutually exclusive), then downloads the jar and decompresses it
   * into an output tree. Returns null whenever a Skyframe dependency is not yet available.
   *
   * @throws RepositoryFunctionException if both 'repository' and 'server' are set, or the
   *     download/decompression fails.
   */
  @Override
  public SkyValue compute(SkyKey skyKey, Environment env) throws RepositoryFunctionException {
    RepositoryName repositoryName = (RepositoryName) skyKey.argument();
    Rule rule = RepositoryFunction.getRule(repositoryName, MavenJarRule.NAME, env);
    if (rule == null) {
      // Skyframe restart: the rule value is not available yet.
      return null;
    }

    String url;
    AggregatingAttributeMapper mapper = AggregatingAttributeMapper.of(rule);
    boolean hasRepository = mapper.has("repository", Type.STRING)
        && !mapper.get("repository", Type.STRING).isEmpty();
    boolean hasServer = mapper.has("server", Type.STRING)
        && !mapper.get("server", Type.STRING).isEmpty();
    if (hasRepository && hasServer) {
      throw new RepositoryFunctionException(new EvalException(
          Location.fromFile(getWorkspace().getRelative("WORKSPACE")), rule + " specifies both "
          + "'repository' and 'server', which are mutually exclusive options"),
          Transience.PERSISTENT);
    } else if (hasRepository) {
      url = mapper.get("repository", Type.STRING);
    } else {
      // Fall back to the named server, or the default server when none is given.
      String serverName = hasServer ? mapper.get("server", Type.STRING) : DEFAULT_SERVER;
      MavenServerValue mavenServerValue = (MavenServerValue) env.getValue(
          MavenServerValue.key(serverName));
      if (mavenServerValue == null) {
        // Skyframe restart: the server value is not available yet.
        return null;
      }
      url = mavenServerValue.getUrl();
    }
    MavenDownloader downloader = createMavenDownloader(mapper, url);
    return createOutputTree(downloader, env);
  }

  /**
   * Creates the downloader for this rule; the output directory is named after the rule and
   * rooted in the external repository directory.
   */
  @VisibleForTesting
  MavenDownloader createMavenDownloader(AttributeMap mapper, String url) {
    String name = mapper.getName();
    Path outputDirectory = getExternalRepositoryDirectory().getRelative(name);
    return new MavenDownloader(name, mapper, outputDirectory, url);
  }

  /**
   * Downloads the jar and unpacks it into the output directory, returning the resulting
   * repository value, or null whenever a Skyframe dependency is not yet available.
   *
   * @throws RepositoryFunctionException if the download or decompression fails (transient).
   */
  SkyValue createOutputTree(MavenDownloader downloader, Environment env)
      throws RepositoryFunctionException {
    FileValue outputDirectoryValue = createDirectory(downloader.getOutputDirectory(), env);
    if (outputDirectoryValue == null) {
      return null;
    }

    Path repositoryJar;
    try {
      repositoryJar = downloader.download();
    } catch (IOException e) {
      throw new RepositoryFunctionException(e, Transience.TRANSIENT);
    }

    // Add a WORKSPACE file & BUILD file to the Maven jar.
    DecompressorValue value;
    try {
      value = (DecompressorValue) env.getValueOrThrow(DecompressorValue.jarKey(
          MavenJarRule.NAME, downloader.getName(), repositoryJar,
          outputDirectoryValue.realRootedPath().asPath()), IOException.class);
      if (value == null) {
        return null;
      }
    } catch (IOException e) {
      throw new RepositoryFunctionException(e, Transience.TRANSIENT);
    }
    FileValue repositoryFileValue = getRepositoryDirectory(value.getDirectory(), env);
    if (repositoryFileValue == null) {
      return null;
    }
    return RepositoryValue.create(repositoryFileValue);
  }

  @Override
  public SkyFunctionName getSkyFunctionName() {
    return SkyFunctionName.create(Ascii.toUpperCase(MavenJarRule.NAME));
  }

  /**
   * @see RepositoryFunction#getRule(RepositoryName, String, Environment)
   */
  @Override
  public Class<? extends RuleDefinition> getRuleDefinition() {
    return MavenJarRule.class;
  }

  /**
   * This downloader creates a connection to one or more Maven repositories and downloads a jar.
   */
  static class MavenDownloader {
    private final String name;
    // Maven coordinate, "group:artifact:version".
    private final String artifact;
    private final Path outputDirectory;
    // Optional expected SHA-1 of the downloaded jar; null disables verification.
    @Nullable
    private final String sha1;
    private final String url;

    public MavenDownloader(String name, AttributeMap mapper, Path outputDirectory, String url) {
      this.name = name;
      this.outputDirectory = outputDirectory;
      // Either the full 'artifact' coordinate or the group_id/artifact_id/version triple.
      if (!mapper.get("artifact", Type.STRING).isEmpty()) {
        this.artifact = mapper.get("artifact", Type.STRING);
      } else {
        this.artifact = mapper.get("group_id", Type.STRING) + ":"
            + mapper.get("artifact_id", Type.STRING) + ":"
            + mapper.get("version", Type.STRING);
      }
      this.sha1 = (mapper.has("sha1", Type.STRING)) ? mapper.get("sha1", Type.STRING) : null;
      this.url = url;
    }

    /**
     * Returns the name for this artifact-fetching rule.
     */
    public String getName() {
      return name;
    }

    /**
     * Returns the directory that this artifact will be downloaded to.
     */
    public Path getOutputDirectory() {
      return outputDirectory;
    }

    /**
     * Download the Maven artifact to the output directory. Returns the path to the jar.
     *
     * @throws IOException if the coordinate is malformed, resolution fails, or the downloaded
     *     file does not match the expected SHA-1.
     */
    public Path download() throws IOException {
      MavenConnector connector = new MavenConnector(outputDirectory.getPathString());
      RepositorySystem system = connector.newRepositorySystem();
      RepositorySystemSession session = connector.newRepositorySystemSession(system);

      RemoteRepository repository = new RemoteRepository.Builder(
          name, MavenServerValue.DEFAULT_ID, url).build();
      ArtifactRequest artifactRequest = new ArtifactRequest();
      Artifact artifact;

      try {
        artifact = new DefaultArtifact(this.artifact);
      } catch (IllegalArgumentException e) {
        // Preserve the cause so the coordinate-parse failure is diagnosable.
        throw new IOException(e.getMessage(), e);
      }
      artifactRequest.setArtifact(artifact);
      artifactRequest.setRepositories(ImmutableList.of(repository));

      try {
        ArtifactResult artifactResult = system.resolveArtifact(session, artifactRequest);
        artifact = artifactResult.getArtifact();
      } catch (ArtifactResolutionException e) {
        // Preserve the cause so the resolution failure is diagnosable.
        throw new IOException("Failed to fetch Maven dependency: " + e.getMessage(), e);
      }
      Path downloadPath = outputDirectory.getRelative(artifact.getFile().getAbsolutePath());
      // Verify checksum.
      if (!Strings.isNullOrEmpty(sha1)) {
        Hasher hasher = Hashing.sha1().newHasher();
        String downloadSha1 = HttpDownloader.getHash(hasher, downloadPath);
        if (!sha1.equals(downloadSha1)) {
          throw new IOException("Downloaded file at " + downloadPath + " has SHA-1 of "
              + downloadSha1 + ", does not match expected SHA-1 (" + sha1 + ")");
        }
      }
      return downloadPath;
    }
  }
}
package com.github.forax.proxy2;

import static org.objectweb.asm.Opcodes.*;

import java.lang.invoke.CallSite;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodHandles.Lookup;
import java.lang.invoke.MethodType;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.UndeclaredThrowableException;

import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.FieldVisitor;
import org.objectweb.asm.Handle;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Type;

import sun.misc.Unsafe;

/**
 * A bunch of static factory methods to create proxy factories.
 *
 * Unlike java.lang.reflect.Proxy, the implementation doesn't do any caching,
 * so calling {@link #createAnonymousProxyFactory(Lookup, MethodType, ProxyHandler)}
 * or its siblings with the same interface as return type of the method type
 * will generate as many proxy classes as the number of calls.
 */
public class Proxy2 {
  private Proxy2() {
    // no instance
  }

  /**
   * Specify how to link a proxy method to its implementation.
   */
  public interface ProxyHandler {
    /**
     * Provide default implementations of all methods of {@link ProxyHandler}
     * but {@link ProxyHandler#bootstrap(ProxyContext)}.
     */
    public static abstract class Default implements ProxyHandler {
      /**
       * {@inheritDoc}
       *
       * @implSpec
       * The implementation always returns false.
       */
      @Override
      public boolean override(Method method) {
        return false;
      }

      /**
       * {@inheritDoc}
       *
       * @implSpec
       * The implementation always returns false.
       */
      @Override
      public boolean isMutable(int fieldIndex, Class<?> fieldType) {
        return false;
      }
    }

    /**
     * Define the bootstrap function as a functional interface.
     */
    public interface Bootstrap {
      /**
       * Called to link a proxy method to a target method handle (through a callsite's target).
       * This method is called once by method at runtime the first time the proxy method is called.
       *
       * @param context object containing information like the method that will be linked
       *        and methods to access the fields and methods of the proxy implementation.
       * @return a callsite object indicating how to link the method to a target method handle.
       * @throws Throwable if any errors occur.
       */
      public CallSite bootstrap(ProxyContext context) throws Throwable;
    }

    /**
     * Returns true if the proxy field should be mutable.
     * @param fieldIndex the index of the proxy field.
     * @param fieldType the type of the proxy field.
     * @return true if the proxy field should be mutable, false otherwise.
     */
    public boolean isMutable(int fieldIndex, Class<?> fieldType);

    /**
     * Returns true if the method should be overridden by the proxy.
     * This method is only called for method that have an existing implementation
     * (default methods or Object's toString(), equals() and hashCode().
     * This method is called once by method when generating the proxy call.
     *
     * @param method a method of the interface that may be overridden
     * @return true if the method should be overridden by the proxy.
     */
    public boolean override(Method method);

    /**
     * Called to link a proxy method to a target method handle (through a callsite's target).
     * This method is called once by method at runtime the first time the proxy method is called.
     *
     * @param context object containing information like the method that will be linked
     *        and methods to access the fields and methods of the proxy implementation.
     * @return a callsite object indicating how to link the method to a target method handle.
     * @throws Throwable if any errors occur.
     */
    public CallSite bootstrap(ProxyContext context) throws Throwable;
  }

  /**
   * Object that encapsulate the data that are available to implement a proxy method.
   */
  public static class ProxyContext {
    private final Lookup lookup;
    private final MethodType methodType;
    private final Method method;

    private ProxyContext(Lookup lookup, MethodType methodType, Method method) {
      this.lookup = lookup;
      this.methodType = methodType;
      this.method = method;
    }

    /**
     * Returns the interface method about to be linked.
     * @return the interface method about to be linked.
     */
    public Method method() {
      return method;
    }

    /**
     * Returns the method type of the invokedynamic call inside the implementation
     * of the proxy method.
     * This method type must also be the {@link CallSite#type() type of the callsite}
     * returned by {@link ProxyHandler#bootstrap(ProxyContext)}.
     * @return type of the invokedynamic inside the implementation of the proxy method.
     */
    public MethodType type() {
      return methodType;
    }

    /**
     * Returns a method handle that returns the value of a field of the proxy.
     * @param fieldIndex the index of the field.
     * @param type the type of the field
     * @return a method handle that returns the value of a field of the proxy.
     * @throws NoSuchFieldException if the field doesn't exist.
     *
     * @see Lookup#findGetter(Class, String, Class)
     */
    public MethodHandle findFieldGetter(int fieldIndex, Class<?> type) throws NoSuchFieldException {
      try {
        // Proxy fields are generated with the naming scheme "arg<index>"; widen the
        // receiver type to Object because the generated proxy class is anonymous.
        return lookup.findGetter(lookup.lookupClass(), "arg" + fieldIndex, type).
            asType(MethodType.methodType(type, Object.class));
      } catch (IllegalAccessException e) {
        throw new AssertionError(e);
      }
    }

    /**
     * Returns a method handle that set the value of a field of the proxy.
     * The field must be {@link ProxyHandler#isMutable(int, Class) mutable}.
     *
     * @param fieldIndex the index of the field.
     * @param type the type of the field
     * @return a method handle that set the value of a field of the proxy.
     * @throws NoSuchFieldException if the field doesn't exist.
     *
     * @see Lookup#findSetter(Class, String, Class)
     */
    public MethodHandle findFieldSetter(int fieldIndex, Class<?> type) throws NoSuchFieldException {
      try {
        return lookup.findSetter(lookup.lookupClass(), "arg" + fieldIndex, type).
            asType(MethodType.methodType(void.class, Object.class, type));
      } catch (IllegalAccessException e) {
        throw new AssertionError(e);
      }
    }

    // referenced by a method handle (CONTEXT_CREATE below)
    static ProxyContext create(Lookup lookup, MethodType methodType, Method method) {
      return new ProxyContext(lookup, methodType, method);
    }
  }

  /**
   * A factory of proxy implementing an interface.
   *
   * @param <T> the type of the proxy interface.
   * @see Proxy2#createAnonymousProxyFactory(Class, Class[], ProxyHandler)
   */
  @FunctionalInterface
  public interface ProxyFactory<T> {
    /**
     * Create a proxy with a value for each field of the proxy.
     * @param fieldValues the value of each field of the proxy.
     * @return a new proxy instance.
     */
    public T create(Object... fieldValues);
  }

  private static final Class<?>[] EMPTY_FIELD_TYPES = new Class<?>[0];

  /**
   * Create a factory that will create anonymous proxy instances implementing an interface {@code type} and no field.
   * The {@code handler} is used to specify the linking between a method and its implementation.
   * The created proxy class will define no field so {@link ProxyFactory#create(Object...)} should be called with no argument.
   *
   * @param type the interface that the proxy should respect.
   * @param handler an interface that specifies how a proxy method is linked to its implementation.
   * @return a proxy factory that will create proxy instance.
   *
   * @see #createAnonymousProxyFactory(Class, Class[], ProxyHandler)
   */
  public static <T> ProxyFactory<T> createAnonymousProxyFactory(Class<? extends T> type, ProxyHandler handler) {
    return createAnonymousProxyFactory(type, EMPTY_FIELD_TYPES, handler);
  }

  /**
   * Create a factory that will create anonymous proxy instances implementing an interface {@code type}
   * and with several fields described by {@code fieldTypes}.
   * The {@code handler} is used to specify the linking between a method and its implementation.
   * The created proxy class will define several fields so {@link ProxyFactory#create(Object...)} should be called with
   * the values of the field as argument.
   *
   * @param type the interface that the proxy should respect.
   * @param fieldTypes type of the fields of the generated proxy.
   * @param handler an interface that specifies how a proxy method is linked to its implementation.
   * @return a proxy factory that will create proxy instance.
   *
   * @see #createAnonymousProxyFactory(Lookup, MethodType, ProxyHandler)
   */
  public static <T> ProxyFactory<T> createAnonymousProxyFactory(Class<? extends T> type, Class<?>[] fieldTypes, ProxyHandler handler) {
    MethodHandle mh = createAnonymousProxyFactory(MethodHandles.publicLookup(), MethodType.methodType(type, fieldTypes), handler);
    return new ProxyFactory<T>() {   // don't use a lambda here to avoid cycle when retro-weaving
      @Override
      public T create(Object... fieldValues) {
        try {
          return type.cast(mh.invokeWithArguments(fieldValues));
        } catch(RuntimeException | Error e) {
          throw e;
        } catch (Throwable e) {
          // checked exceptions can't escape create(), wrap them like j.l.r.Proxy does
          throw new UndeclaredThrowableException(e);
        }
      }
    };
  }

  /**
   * Create a factory that will create anonymous proxy instances implementing an interface {@code type}
   * and with several fields described by {@code fieldTypes}.
   * The {@code bootstrap} is used to specify the linking between a method and its implementation.
   * The created proxy class will define several fields so {@link ProxyFactory#create(Object...)} should be called with
   * the values of the field as argument.
   *
   * @param type the interface that the proxy should respect.
   * @param fieldTypes type of the fields of the generated proxy.
   * @param bootstrap an interface that specifies how a proxy method is linked to its implementation.
   * @return a proxy factory that will create proxy instance.
   *
   * @see #createAnonymousProxyFactory(Lookup, MethodType, ProxyHandler)
   */
  public static <T> ProxyFactory<T> createAnonymousProxyFactory(Class<? extends T> type, Class<?>[] fieldTypes, ProxyHandler.Bootstrap bootstrap) {
    return createAnonymousProxyFactory(type, fieldTypes, new ProxyHandler.Default() {
      @Override
      public CallSite bootstrap(ProxyContext context) throws Throwable {
        return bootstrap.bootstrap(context);
      }
    });
  }

  // Unsafe is needed for defineAnonymousClass/ensureClassInitialized below;
  // obtained reflectively because theUnsafe is not public API.
  private static final Unsafe UNSAFE;
  static {
    Unsafe unsafe;
    try {
      Field unsafeField = Unsafe.class.getDeclaredField("theUnsafe");
      unsafeField.setAccessible(true);
      unsafe = (Unsafe)unsafeField.get(null);
    } catch (NoSuchFieldException|IllegalAccessException e) {
      throw new AssertionError(e);
    }
    UNSAFE = unsafe;
  }

  // Converts a class name to its JVM internal form (dots to slashes).
  private static String internalName(Class<?> type) {
    return type.getName().replace('.', '/');
  }

  private static String[] internalNames(Class<?>[] types) {
    // keep it compatible with Java 7
    //return Arrays.stream(method.getExceptionTypes()).map(Proxy2::internalName).toArray(String[]::new);
    String[] array = new String[types.length];
    for(int i = 0; i < array.length; i++) {
      array[i] = internalName(types[i]);
    }
    return array;
  }

  // True when running on Java 8+, detected by probing for a 1.8-only class.
  private static final boolean IS_1_8;
  static {
    boolean is1_8;
    try {
      Class.forName("java.util.Spliterator");  // 1.8 ?
      is1_8 = true;
    } catch (ClassNotFoundException e) {
      is1_8 = false;
    }
    IS_1_8 = is1_8;
  }

  /**
   * Create a factory that will create anonymous proxy instances with several fields described by
   * the parameter types of {@code methodType} and implementing an interface described by
   * the return type of {@code methodType}.
   * The {@code handler} is used to specify the linking between a method and its implementation.
   * The returned {@link MethodHandle} will have its type being equals to the {@code methodType}
   * taken as argument.
   *
   * @param lookup access token used to access to the interface methods
   * @param methodType the parameter types of this {@link MethodType} described the type of the fields
   *        and the return type the interface implemented by the proxy.
   * @param handler an interface that specifies how a proxy method is linked to its implementation.
   * @return a method handle that if {@link MethodHandle#invokeExact(Object...) called} will create
   *         a proxy instance of a class implementing the return interfaces.
   * @throws IllegalArgumentException if the proxy interface is not visible from the lookup object.
   *
   * @see #createAnonymousProxyFactory(Class, Class[], ProxyHandler)
   */
  public static MethodHandle createAnonymousProxyFactory(Lookup lookup, MethodType methodType, ProxyHandler handler) {
    Class<?> interfaze = methodType.returnType();
    if (lookup.in(interfaze).lookupModes() == 0) {
      throw new IllegalArgumentException("interface " + interfaze + " is not visible from " + lookup);
    }

    // if the proxy is in java.lang.invoke and the interface is not visible, the OpenJDK 7 VM crashes !
    String proxyName = (!IS_1_8 && !Modifier.isPublic(interfaze.getModifiers()))?
        "com/github/forax/proxy2/Foo":
        "java/lang/invoke/Foo";
    ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_FRAMES|ClassWriter.COMPUTE_MAXS);
    writer.visit(V1_7, ACC_PUBLIC|ACC_SUPER|ACC_FINAL, proxyName, null, "java/lang/Object",
        new String[]{ internalName(interfaze) });

    // Generate the constructor and the static factory method "0-^-0" together:
    // for each field type, the constructor stores a parameter into field "arg<i>"
    // and the factory forwards its own parameter to the constructor.
    String initDesc;
    {
      initDesc = methodType.changeReturnType(void.class).toMethodDescriptorString();
      MethodVisitor init = writer.visitMethod(ACC_PUBLIC, "<init>", initDesc, null, null);
      String factoryDesc = methodType.toMethodDescriptorString();
      MethodVisitor factory = writer.visitMethod(ACC_PUBLIC|ACC_STATIC, "0-^-0", factoryDesc, null, null);
      init.visitCode();
      init.visitVarInsn(ALOAD, 0);
      init.visitMethodInsn(INVOKESPECIAL, "java/lang/Object", "<init>", "()V", false);
      factory.visitCode();
      factory.visitTypeInsn(NEW, proxyName);
      factory.visitInsn(DUP);
      int slot = 1;  // constructor local slot 0 is 'this'; factory slots start at 0 (slot - 1)
      for(int i = 0; i < methodType.parameterCount(); i++) {
        Class<?> boundType = methodType.parameterType(i);
        String fieldName = "arg" + i;
        int finalFlag = handler.isMutable(i, boundType)? 0: ACC_FINAL;
        FieldVisitor fv = writer.visitField(ACC_PRIVATE|finalFlag, fieldName, Type.getDescriptor(boundType), null, null);
        fv.visitEnd();
        int loadOp = Type.getType(boundType).getOpcode(ILOAD);
        init.visitVarInsn(ALOAD, 0);
        init.visitVarInsn(loadOp, slot);
        init.visitFieldInsn(PUTFIELD, proxyName, fieldName, Type.getDescriptor(boundType));
        factory.visitVarInsn(loadOp, slot - 1);
        slot += (boundType == long.class || boundType == double.class)? 2: 1;
      }
      init.visitInsn(RETURN);
      factory.visitMethodInsn(INVOKESPECIAL, proxyName, "<init>", initDesc, false);
      factory.visitInsn(ARETURN);
      init.visitMaxs(-1, -1);
      init.visitEnd();
      factory.visitMaxs(-1, -1);
      factory.visitEnd();
    }

    // Placeholder constants are inserted now and patched via defineAnonymousClass below.
    String mhPlaceHolder = "<<MH_HOLDER>>";
    int mhHolderCPIndex = writer.newConst(mhPlaceHolder);
    Handle BSM = new Handle(H_INVOKESTATIC, proxyName, "bsm",
        MethodType.methodType(CallSite.class, Lookup.class, String.class, MethodType.class, MethodHandle.class, Method.class).toMethodDescriptorString());
    { // bsm: the invokedynamic bootstrap; delegates to the patched-in method handle
      MethodVisitor mv = writer.visitMethod(ACC_PRIVATE|ACC_STATIC, "bsm", BSM.getDesc(), null, null);
      mv.visitCode();
      mv.visitVarInsn(ALOAD, 3);  // mh
      mv.visitVarInsn(ALOAD, 0);  // lookup
      mv.visitVarInsn(ALOAD, 2);  // method type
      mv.visitVarInsn(ALOAD, 4);  // method
      mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/invoke/MethodHandle", "invokeExact",
          "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/invoke/MethodType;Ljava/lang/reflect/Method;)Ljava/lang/invoke/CallSite;", false);
      mv.visitInsn(ARETURN);
      mv.visitMaxs(-1, -1);
      mv.visitEnd();
    }

    // Generate one proxy method per interface method; each method pushes the proxy
    // fields and its own parameters then performs an invokedynamic linked by bsm.
    Method[] methods = interfaze.getMethods();
    int[] methodHolderCPIndexes = new int[methods.length];
    // NOTE(review): for skipped methods the CP index stays 0, so the patch loop below
    // writes into patches[0]; presumably harmless since index 0 is unused — verify.
    for(int methodIndex = 0; methodIndex < methods.length; methodIndex++) {
      Method method = methods[methodIndex];
      int modifiers = method.getModifiers();
      if (Modifier.isStatic(modifiers)) {
        continue;
      }
      //FIXME add support of public methods of java.lang.Object
      if (!Modifier.isAbstract(modifiers) && !handler.override(method)) {
        continue;
      }
      String methodDesc = Type.getMethodDescriptor(method);
      MethodVisitor mv = writer.visitMethod(ACC_PUBLIC, method.getName(), methodDesc, null,
          internalNames(method.getExceptionTypes()));
      mv.visitAnnotation("Ljava/lang/invoke/LambdaForm$Hidden;", true);
      mv.visitAnnotation("Ljava/lang/invoke/ForceInline;", true);
      mv.visitCode();
      mv.visitVarInsn(ALOAD, 0);
      for(int i = 0; i < methodType.parameterCount(); i++) {
        Class<?> fieldType = methodType.parameterType(i);
        mv.visitVarInsn(ALOAD, 0);
        mv.visitFieldInsn(GETFIELD, proxyName, "arg" + i, Type.getDescriptor(fieldType));
      }
      int slot = 1;
      for(Class<?> parameterType: method.getParameterTypes()) {
        mv.visitVarInsn(Type.getType(parameterType).getOpcode(ILOAD), slot);
        slot += (parameterType == long.class || parameterType == double.class)? 2: 1;
      }
      String methodPlaceHolder = "<<METHOD_HOLDER " + methodIndex + ">>";
      methodHolderCPIndexes[methodIndex] = writer.newConst(methodPlaceHolder);
      // indy descriptor = (proxy, fields..., method params...) -> method return type
      mv.visitInvokeDynamicInsn(method.getName(),
          "(Ljava/lang/Object;" + initDesc.substring(1, initDesc.length() - 2) + methodDesc.substring(1),
          BSM, mhPlaceHolder, methodPlaceHolder);
      mv.visitInsn(Type.getReturnType(method).getOpcode(IRETURN));
      mv.visitMaxs(-1, -1);
      mv.visitEnd();
    }
    writer.visitEnd();
    byte[] data = writer.toByteArray();

    // Patch the placeholder constants with live objects: the bootstrap method handle
    // (handler bound, composed with ProxyContext.create) and each reflective Method.
    int constantPoolSize = writer.newConst("<<SENTINEL>>");
    Object[] patches = new Object[constantPoolSize];
    patches[mhHolderCPIndex] = MethodHandles.filterReturnValue(CONTEXT_CREATE,
        MethodHandles.insertArguments(BOOTSTRAP_MH, 0, handler));
    for(int i = 0; i < methodHolderCPIndexes.length; i++) {
      patches[methodHolderCPIndexes[i]] = methods[i];
    }
    Class<?> clazz = UNSAFE.defineAnonymousClass(interfaze, data, patches);
    UNSAFE.ensureClassInitialized(clazz);
    try {
      return MethodHandles.publicLookup().findStatic(clazz, "0-^-0", methodType);
    } catch (NoSuchMethodException | IllegalAccessException e) {
      throw new AssertionError(e);
    }
  }

  // BOOTSTRAP_MH: ProxyHandler.bootstrap(ProxyContext); CONTEXT_CREATE: ProxyContext.create(...)
  private static final MethodHandle BOOTSTRAP_MH, CONTEXT_CREATE;
  static {
    Lookup lookup = MethodHandles.lookup();
    try {
      BOOTSTRAP_MH = lookup.findVirtual(ProxyHandler.class, "bootstrap",
          MethodType.methodType(CallSite.class, ProxyContext.class));
      CONTEXT_CREATE = lookup.findStatic(ProxyContext.class, "create",
          MethodType.methodType(ProxyContext.class, Lookup.class, MethodType.class, Method.class));
    } catch (NoSuchMethodException|IllegalAccessException e) {
      throw new AssertionError(e);
    }
  }
}
package com.google.bitcoin.bouncycastle.asn1.x509;

import com.google.bitcoin.bouncycastle.asn1.ASN1Encodable;
import com.google.bitcoin.bouncycastle.asn1.ASN1Sequence;
import com.google.bitcoin.bouncycastle.asn1.ASN1TaggedObject;
import com.google.bitcoin.bouncycastle.asn1.DERGeneralizedTime;
import com.google.bitcoin.bouncycastle.asn1.DERInteger;
import com.google.bitcoin.bouncycastle.asn1.DERObject;
import com.google.bitcoin.bouncycastle.asn1.DERTaggedObject;
import com.google.bitcoin.bouncycastle.asn1.DERUTCTime;

import java.util.Enumeration;
import java.util.NoSuchElementException;

/**
 * PKIX RFC-2459 - TBSCertList object.
 * <pre>
 * TBSCertList  ::=  SEQUENCE  {
 *      version                 Version OPTIONAL,
 *                                   -- if present, shall be v2
 *      signature               AlgorithmIdentifier,
 *      issuer                  Name,
 *      thisUpdate              Time,
 *      nextUpdate              Time OPTIONAL,
 *      revokedCertificates     SEQUENCE OF SEQUENCE  {
 *           userCertificate         CertificateSerialNumber,
 *           revocationDate          Time,
 *           crlEntryExtensions      Extensions OPTIONAL
 *                                         -- if present, shall be v2
 *                                }  OPTIONAL,
 *      crlExtensions           [0]  EXPLICIT Extensions OPTIONAL
 *                                         -- if present, shall be v2
 *                                }
 * </pre>
 */
public class TBSCertList
    extends ASN1Encodable
{
    /**
     * One element of the revokedCertificates sequence: the serial number of
     * the revoked certificate, its revocation date and (v2 only) optional
     * per-entry extensions.
     */
    public class CRLEntry
        extends ASN1Encodable
    {
        ASN1Sequence seq;

        DERInteger userCertificate;
        Time revocationDate;
        X509Extensions crlEntryExtensions; // decoded lazily by getExtensions()

        public CRLEntry(
            ASN1Sequence seq)
        {
            if (seq.size() < 2 || seq.size() > 3)
            {
                throw new IllegalArgumentException("Bad sequence size: " + seq.size());
            }

            this.seq = seq;

            userCertificate = DERInteger.getInstance(seq.getObjectAt(0));
            revocationDate = Time.getInstance(seq.getObjectAt(1));
        }

        public DERInteger getUserCertificate()
        {
            return userCertificate;
        }

        public Time getRevocationDate()
        {
            return revocationDate;
        }

        /**
         * @return the crlEntryExtensions, or null when the entry carries none.
         */
        public X509Extensions getExtensions()
        {
            // the extensions are the optional third element; decode on demand
            if (crlEntryExtensions == null && seq.size() == 3)
            {
                crlEntryExtensions = X509Extensions.getInstance(seq.getObjectAt(2));
            }

            return crlEntryExtensions;
        }

        public DERObject toASN1Object()
        {
            return seq;
        }
    }

    /**
     * Lazy Enumeration over revokedCertificates that wraps each raw ASN.1
     * element in a CRLEntry as it is consumed.
     */
    private class RevokedCertificatesEnumeration
        implements Enumeration
    {
        private final Enumeration en;

        RevokedCertificatesEnumeration(Enumeration en)
        {
            this.en = en;
        }

        public boolean hasMoreElements()
        {
            return en.hasMoreElements();
        }

        public Object nextElement()
        {
            return new CRLEntry(ASN1Sequence.getInstance(en.nextElement()));
        }
    }

    /**
     * Enumeration used when the CRL has no revokedCertificates element.
     */
    private class EmptyEnumeration
        implements Enumeration
    {
        public boolean hasMoreElements()
        {
            return false;
        }

        public Object nextElement()
        {
            // java.util.Enumeration contract: nextElement() must throw
            // NoSuchElementException when no more elements exist, not return
            // null (resolves the former "TODO: check exception handling").
            throw new NoSuchElementException("Empty Enumeration");
        }
    }

    ASN1Sequence seq;

    DERInteger version;
    AlgorithmIdentifier signature;
    X509Name issuer;
    Time thisUpdate;
    Time nextUpdate;
    ASN1Sequence revokedCertificates;
    X509Extensions crlExtensions;

    public static TBSCertList getInstance(
        ASN1TaggedObject obj,
        boolean          explicit)
    {
        return getInstance(ASN1Sequence.getInstance(obj, explicit));
    }

    public static TBSCertList getInstance(
        Object obj)
    {
        if (obj instanceof TBSCertList)
        {
            return (TBSCertList)obj;
        }
        else if (obj instanceof ASN1Sequence)
        {
            return new TBSCertList((ASN1Sequence)obj);
        }

        throw new IllegalArgumentException("unknown object in factory: " + obj.getClass().getName());
    }

    public TBSCertList(
        ASN1Sequence seq)
    {
        // 3 mandatory elements (signature, issuer, thisUpdate) plus up to
        // 4 optional ones (version, nextUpdate, revokedCertificates,
        // crlExtensions)
        if (seq.size() < 3 || seq.size() > 7)
        {
            throw new IllegalArgumentException("Bad sequence size: " + seq.size());
        }

        int seqPos = 0;

        this.seq = seq;

        // version is optional; absent means v1 (encoded value 0)
        if (seq.getObjectAt(seqPos) instanceof DERInteger)
        {
            version = DERInteger.getInstance(seq.getObjectAt(seqPos++));
        }
        else
        {
            version = new DERInteger(0);
        }

        signature = AlgorithmIdentifier.getInstance(seq.getObjectAt(seqPos++));
        issuer = X509Name.getInstance(seq.getObjectAt(seqPos++));
        thisUpdate = Time.getInstance(seq.getObjectAt(seqPos++));

        // nextUpdate is an optional Time (UTCTime or GeneralizedTime)
        if (seqPos < seq.size()
            && (seq.getObjectAt(seqPos) instanceof DERUTCTime
               || seq.getObjectAt(seqPos) instanceof DERGeneralizedTime
               || seq.getObjectAt(seqPos) instanceof Time))
        {
            nextUpdate = Time.getInstance(seq.getObjectAt(seqPos++));
        }

        // revokedCertificates is the optional untagged SEQUENCE OF
        if (seqPos < seq.size()
            && !(seq.getObjectAt(seqPos) instanceof DERTaggedObject))
        {
            revokedCertificates = ASN1Sequence.getInstance(seq.getObjectAt(seqPos++));
        }

        // crlExtensions is the optional [0] EXPLICIT tagged element
        if (seqPos < seq.size()
            && seq.getObjectAt(seqPos) instanceof DERTaggedObject)
        {
            crlExtensions = X509Extensions.getInstance(seq.getObjectAt(seqPos));
        }
    }

    /**
     * @return the human-readable version (encoded value + 1), e.g. 1 for v1.
     */
    public int getVersion()
    {
        return version.getValue().intValue() + 1;
    }

    public DERInteger getVersionNumber()
    {
        return version;
    }

    public AlgorithmIdentifier getSignature()
    {
        return signature;
    }

    public X509Name getIssuer()
    {
        return issuer;
    }

    public Time getThisUpdate()
    {
        return thisUpdate;
    }

    public Time getNextUpdate()
    {
        return nextUpdate;
    }

    /**
     * @return all revoked-certificate entries eagerly decoded; an empty array
     *         when the CRL carries none (never null).
     */
    public CRLEntry[] getRevokedCertificates()
    {
        if (revokedCertificates == null)
        {
            return new CRLEntry[0];
        }

        CRLEntry[] entries = new CRLEntry[revokedCertificates.size()];

        for (int i = 0; i < entries.length; i++)
        {
            entries[i] = new CRLEntry(ASN1Sequence.getInstance(revokedCertificates.getObjectAt(i)));
        }

        return entries;
    }

    /**
     * @return a lazy Enumeration of CRLEntry objects; preferable to
     *         getRevokedCertificates() for large CRLs.
     */
    public Enumeration getRevokedCertificateEnumeration()
    {
        if (revokedCertificates == null)
        {
            return new EmptyEnumeration();
        }

        return new RevokedCertificatesEnumeration(revokedCertificates.getObjects());
    }

    public X509Extensions getExtensions()
    {
        return crlExtensions;
    }

    public DERObject toASN1Object()
    {
        return seq;
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.activiti.app.service.editor;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.activiti.app.domain.editor.AbstractModel;
import org.activiti.app.domain.editor.AppDefinition;
import org.activiti.app.domain.editor.AppModelDefinition;
import org.activiti.app.domain.editor.Model;
import org.activiti.app.domain.editor.ModelHistory;
import org.activiti.app.domain.editor.ModelRelation;
import org.activiti.app.domain.editor.ModelRelationTypes;
import org.activiti.app.model.editor.ModelKeyRepresentation;
import org.activiti.app.model.editor.ReviveModelResultRepresentation;
import org.activiti.app.model.editor.ReviveModelResultRepresentation.UnresolveModelRepresentation;
import org.activiti.app.repository.editor.ModelHistoryRepository;
import org.activiti.app.repository.editor.ModelRelationRepository;
import org.activiti.app.repository.editor.ModelRepository;
import org.activiti.app.service.exception.InternalServerErrorException;
import org.activiti.app.service.exception.NotFoundException;
import org.activiti.bpmn.model.BpmnModel;
import org.activiti.bpmn.model.ExtensionElement;
import org.activiti.bpmn.model.UserTask;
import org.activiti.editor.language.json.converter.util.CollectionUtils;
import org.activiti.editor.language.json.converter.util.JsonConverterUtil;
import org.activiti.engine.identity.User;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

/**
 * CRUD, versioning and relation-management service for editor models
 * (BPMN processes, forms, decision tables and app definitions).
 *
 * Every save of a new version snapshots the current state into the model
 * history table; deletes either cascade the history or revive the latest
 * history entry.
 */
@Service
public class ModelServiceImpl extends AbstractModelService {

  private final Logger log = LoggerFactory.getLogger(ModelServiceImpl.class);

  public static final String NAMESPACE = "http://activiti.com/modeler";

  protected static final String PROCESS_NOT_FOUND_MESSAGE_KEY = "PROCESS.ERROR.NOT-FOUND";

  @Autowired
  protected ModelRepository modelRepository;

  @Autowired
  protected ModelHistoryRepository modelHistoryRepository;

  @Autowired
  protected ModelRelationRepository modelRelationRepository;

  /**
   * Fetches a model by id.
   *
   * @throws NotFoundException when no model with the given id exists
   *         (this method never returns null)
   */
  @Override
  public Model getModel(String modelId) {
    Model model = modelRepository.findOne(modelId);

    if (model == null) {
      NotFoundException modelNotFound = new NotFoundException("No model found with the given id: " + modelId);
      modelNotFound.setMessageKey(PROCESS_NOT_FOUND_MESSAGE_KEY);
      throw modelNotFound;
    }

    return model;
  }

  @Override
  public List<Model> getModelsByModelType(Integer modelType) {
    return new ArrayList<Model>(modelRepository.findModelsByModelType(modelType));
  }

  @Override
  public List<Model> getModelsByModelType(Integer modelType, String filter) {
    return new ArrayList<Model>(modelRepository.findModelsByModelType(modelType, filter));
  }

  @Override
  public List<Model> getReferencedModels(String modelId) {
    return modelRepository.findModelsByParentModelId(modelId);
  }

  /**
   * Fetches a history entry by id, rejecting entries that were (soft-)removed.
   */
  @Override
  public ModelHistory getModelHistory(String modelHistoryId) {
    ModelHistory modelHistory = modelHistoryRepository.findOne(modelHistoryId);
    // A non-null removal date marks a soft-deleted history entry
    if (modelHistory == null || modelHistory.getRemovalDate() != null) {
      throw new NotFoundException("Process model history not found: " + modelHistoryId);
    }
    return modelHistory;
  }

  @Override
  public List<ModelHistory> getModelHistory(Model model) {
    return modelHistoryRepository.findByModelIdAndRemovalDateIsNullOrderByVersionDesc(model.getId());
  }

  @Override
  public ModelHistory getModelHistory(String modelId, String modelHistoryId) {
    // Fetching the model first also verifies read access to it
    Model model = getModel(modelId);
    ModelHistory modelHistory = getModelHistory(modelHistoryId);

    // The history entry must belong to the requested model
    if (!modelHistory.getModelId().equals(model.getId())) {
      throw new NotFoundException("Process model history not for this model " + modelHistoryId);
    }
    return modelHistory;
  }

  @Override
  public List<ModelHistory> getModelHistoryForUser(User user, Integer modelType) {
    return modelHistoryRepository.findByCreatedByAndModelTypeAndRemovalDateIsNull(user.getId(), modelType);
  }

  /**
   * Checks whether the given key is already used by another model of the same
   * type. The model being edited itself is excluded from the collision check.
   */
  @Override
  public ModelKeyRepresentation validateModelKey(Model model, Integer modelType, String key) {
    ModelKeyRepresentation modelKeyResponse = new ModelKeyRepresentation();
    modelKeyResponse.setKey(key);

    List<Model> models = modelRepository.findModelsByKeyAndType(key, modelType);
    for (Model modelInfo : models) {
      // a hit on a *different* model id means the key is taken
      if (model == null || !modelInfo.getId().equals(model.getId())) {
        modelKeyResponse.setKeyAlreadyExists(true);
        modelKeyResponse.setId(modelInfo.getId());
        modelKeyResponse.setName(modelInfo.getName());
        break;
      }
    }

    return modelKeyResponse;
  }

  @Override
  @Transactional
  public Model createNewModelVersion(Model modelObject, String comment, User updatedBy) {
    return (Model) internalCreateNewModelVersion(modelObject, comment, updatedBy, false);
  }

  @Override
  @Transactional
  public ModelHistory createNewModelVersionAndReturnModelHistory(Model modelObject, String comment, User updatedBy) {
    return (ModelHistory) internalCreateNewModelVersion(modelObject, comment, updatedBy, true);
  }

  /**
   * Snapshots the current model into history and bumps its version.
   *
   * @param returnModelHistory true to return the history snapshot, false to
   *        return the updated model itself
   */
  protected AbstractModel internalCreateNewModelVersion(Model modelObject, String comment, User updatedBy, boolean returnModelHistory) {
    modelObject.setLastUpdated(new Date());
    modelObject.setLastUpdatedBy(updatedBy.getId());
    modelObject.setComment(comment);

    ModelHistory historyModel = createNewModelhistory(modelObject);
    persistModelHistory(historyModel);

    modelObject.setVersion(modelObject.getVersion() + 1);
    persistModel(modelObject);

    return returnModelHistory ? historyModel : modelObject;
  }

  @Override
  public Model saveModel(Model modelObject) {
    return persistModel(modelObject);
  }

  @Override
  @Transactional
  public Model saveModel(Model modelObject, String editorJson, byte[] imageBytes, boolean newVersion, String newVersionComment, User updatedBy) {
    return internalSave(modelObject.getName(), modelObject.getKey(), modelObject.getDescription(), editorJson, newVersion,
        newVersionComment, imageBytes, updatedBy, modelObject);
  }

  @Override
  @Transactional
  public Model saveModel(String modelId, String name, String key, String description, String editorJson,
      boolean newVersion, String newVersionComment, User updatedBy) {

    Model modelObject = modelRepository.findOne(modelId);
    return internalSave(name, key, description, editorJson, newVersion, newVersionComment, null, updatedBy, modelObject);
  }

  /**
   * Common save path: either updates the model in place (newVersion == false)
   * or snapshots it into history first and bumps the version.
   */
  protected Model internalSave(String name, String key, String description, String editorJson, boolean newVersion,
      String newVersionComment, byte[] imageBytes, User updatedBy, Model modelObject) {

    if (!newVersion) {

      modelObject.setLastUpdated(new Date());
      modelObject.setLastUpdatedBy(updatedBy.getId());
      modelObject.setName(name);
      modelObject.setKey(key);
      modelObject.setDescription(description);
      modelObject.setModelEditorJson(editorJson);

      if (imageBytes != null) {
        modelObject.setThumbnail(imageBytes);
      }

    } else {

      ModelHistory historyModel = createNewModelhistory(modelObject);
      persistModelHistory(historyModel);

      modelObject.setVersion(modelObject.getVersion() + 1);
      modelObject.setLastUpdated(new Date());
      modelObject.setLastUpdatedBy(updatedBy.getId());
      modelObject.setName(name);
      modelObject.setKey(key);
      modelObject.setDescription(description);
      modelObject.setModelEditorJson(editorJson);
      modelObject.setComment(newVersionComment);

      if (imageBytes != null) {
        modelObject.setThumbnail(imageBytes);
      }
    }

    return persistModel(modelObject);
  }

  /**
   * Deletes a model. Depending on the flags this either cascades the whole
   * history away or revives the most recent history entry as the new current
   * version.
   */
  @Override
  @Transactional
  public void deleteModel(String modelId, boolean cascadeHistory, boolean deleteRuntimeApp, String comment, User deletedBy) {

    Model model = modelRepository.findOne(modelId);
    if (model == null) {
      throw new IllegalArgumentException("No model found with id: " + modelId);
    }

    // Fetch current model history list before we start mutating it
    List<ModelHistory> history = modelHistoryRepository.findByModelIdAndRemovalDateIsNullOrderByVersionDesc(model.getId());

    // if the model is an app definition and the runtime app needs to be deleted, remove it now
    if (deleteRuntimeApp && model.getModelType() == Model.MODEL_TYPE_APP) {
      String appDefinitionId = modelRepository.appDefinitionIdByModelAndUser(modelId, deletedBy.getId());
      if (appDefinitionId != null) {
        deleteAppDefinition(appDefinitionId);
      }

    } else {
      // Move model to history and mark removed
      ModelHistory historyModel = createNewModelhistory(model);
      historyModel.setRemovalDate(Calendar.getInstance().getTime());
      persistModelHistory(historyModel);
    }

    if (cascadeHistory || history.size() == 0) {
      deleteModelAndChildren(model);
    } else {
      // History available and no cascade was requested. Revive latest history entry
      ModelHistory toRevive = history.remove(0);
      populateModelBasedOnHistory(model, toRevive);
      persistModel(model);
      modelHistoryRepository.delete(toRevive);
    }
  }

  protected void deleteModelAndChildren(Model model) {

    // Models have relations with each other, in all kind of wicked and funny ways.
    // Hence, we remove first all relations, comments, etc. while collecting all models.
    // Then, once all foreign key problemmakers are removed, we remove the models

    List<Model> allModels = new ArrayList<Model>();
    internalDeleteModelAndChildren(model, allModels);

    for (Model modelToDelete : allModels) {
      modelRepository.delete(modelToDelete);
    }
  }

  protected void internalDeleteModelAndChildren(Model model, List<Model> allModels) {
    // Delete all related data first so foreign-key constraints don't block the model delete
    modelRelationRepository.deleteModelRelationsForParentModel(model.getId());

    allModels.add(model);
  }

  /**
   * Makes a historic version the current one again: the present state is
   * snapshotted into history, then the historic properties are copied onto
   * the live model with an incremented version number.
   */
  @Override
  @Transactional
  public ReviveModelResultRepresentation reviveProcessModelHistory(ModelHistory modelHistory, User user, String newVersionComment) {
    Model latestModel = modelRepository.findOne(modelHistory.getModelId());
    if (latestModel == null) {
      throw new IllegalArgumentException("No process model found with id: " + modelHistory.getModelId());
    }

    // Store the current model in history
    ModelHistory latestModelHistory = createNewModelhistory(latestModel);
    persistModelHistory(latestModelHistory);

    // Populate the actual latest version with the properties in the historic model
    latestModel.setVersion(latestModel.getVersion() + 1);
    latestModel.setLastUpdated(new Date());
    latestModel.setLastUpdatedBy(user.getId());
    latestModel.setName(modelHistory.getName());
    latestModel.setKey(modelHistory.getKey());
    latestModel.setDescription(modelHistory.getDescription());
    latestModel.setModelEditorJson(modelHistory.getModelEditorJson());
    latestModel.setModelType(modelHistory.getModelType());
    latestModel.setComment(newVersionComment);
    persistModel(latestModel);

    ReviveModelResultRepresentation result = new ReviveModelResultRepresentation();

    // For apps, we need to make sure the referenced processes exist as models.
    // It could be the user has deleted the process model in the meantime. We give back that info to the user.
    if (latestModel.getModelType() == AbstractModel.MODEL_TYPE_APP) {
      if (StringUtils.isNotEmpty(latestModel.getModelEditorJson())) {
        try {
          AppDefinition appDefinition = getObjectMapper().readValue(latestModel.getModelEditorJson(), AppDefinition.class);
          for (AppModelDefinition appModelDefinition : appDefinition.getModels()) {
            if (!modelRepository.exists(appModelDefinition.getId())) {
              result.getUnresolvedModels().add(new UnresolveModelRepresentation(appModelDefinition.getId(),
                  appModelDefinition.getName(), appModelDefinition.getLastUpdatedBy()));
            }
          }
        } catch (Exception e) {
          log.error("Could not deserialize app model json (id = " + latestModel.getId() + ")", e);
        }
      }
    }

    return result;
  }

  /**
   * Builds a BpmnModel for the given editor model, resolving its referenced
   * form and decision-table models first.
   */
  @Override
  public BpmnModel getBpmnModel(AbstractModel model) {
    BpmnModel bpmnModel = null;
    try {
      Map<String, Model> formMap = new HashMap<String, Model>();
      Map<String, Model> decisionTableMap = new HashMap<String, Model>();

      List<Model> referencedModels = getReferencedModels(model.getId());
      for (Model childModel : referencedModels) {
        if (Model.MODEL_TYPE_FORM == childModel.getModelType()) {
          formMap.put(childModel.getId(), childModel);

        } else if (Model.MODEL_TYPE_DECISION_TABLE == childModel.getModelType()) {
          decisionTableMap.put(childModel.getId(), childModel);
        }
      }

      bpmnModel = getBpmnModel(model, formMap, decisionTableMap);

    } catch (Exception e) {
      log.error("Could not generate BPMN 2.0 model for " + model.getId(), e);
      throw new InternalServerErrorException("Could not generate BPMN 2.0 model");
    }

    return bpmnModel;
  }

  @Override
  public BpmnModel getBpmnModel(AbstractModel model, Map<String, Model> formMap, Map<String, Model> decisionTableMap) {
    try {
      ObjectNode editorJsonNode = (ObjectNode) getObjectMapper().readTree(model.getModelEditorJson());

      // translate model ids to model keys for the converter
      Map<String, String> formKeyMap = new HashMap<String, String>();
      for (Model formModel : formMap.values()) {
        formKeyMap.put(formModel.getId(), formModel.getKey());
      }

      Map<String, String> decisionTableKeyMap = new HashMap<String, String>();
      for (Model decisionTableModel : decisionTableMap.values()) {
        decisionTableKeyMap.put(decisionTableModel.getId(), decisionTableModel.getKey());
      }

      return bpmnJsonConverter.convertToBpmnModel(editorJsonNode, formKeyMap, decisionTableKeyMap);

    } catch (Exception e) {
      log.error("Could not generate BPMN 2.0 model for " + model.getId(), e);
      throw new InternalServerErrorException("Could not generate BPMN 2.0 model");
    }
  }

  /**
   * Sets (or overwrites the first occurrence of) a modeler extension element
   * with the given name on the user task.
   */
  protected void addOrUpdateExtensionElement(String name, String value, UserTask userTask) {
    List<ExtensionElement> extensionElements = userTask.getExtensionElements().get(name);

    ExtensionElement extensionElement;

    if (CollectionUtils.isNotEmpty(extensionElements)) {
      extensionElement = extensionElements.get(0);
    } else {
      extensionElement = new ExtensionElement();
    }
    extensionElement.setNamespace(NAMESPACE);
    extensionElement.setNamespacePrefix("modeler");
    extensionElement.setName(name);
    extensionElement.setElementText(value);

    // only add when it wasn't already attached to the task
    if (CollectionUtils.isEmpty(extensionElements)) {
      userTask.addExtensionElement(extensionElement);
    }
  }

  @Override
  public Long getModelCountForUser(User user, Integer modelType) {
    return modelRepository.countByModelTypeAndUser(modelType, user.getId());
  }

  @Override
  public List<Model> getModelsForUser(User user, Integer modelType, String filter, Sort sort) {
    if (filter == null) {
      return modelRepository.findModelsCreatedBy(user.getId(), modelType, sort);
    }
    return modelRepository.findModelsCreatedBy(user.getId(), modelType, filter, sort);
  }

  /**
   * Persists the model and performs model-type-specific post-processing on
   * its editor JSON (thumbnail generation, relation bookkeeping, key sync).
   */
  protected Model persistModel(Model model) {

    model = modelRepository.save(model);

    if (StringUtils.isNotEmpty(model.getModelEditorJson())) {

      // Parse json to java
      ObjectNode jsonNode = null;
      try {
        jsonNode = (ObjectNode) getObjectMapper().readTree(model.getModelEditorJson());
      } catch (Exception e) {
        log.error("Could not deserialize json model", e);
        throw new InternalServerErrorException("Could not deserialize json model");
      }

      if ((model.getModelType() == null || model.getModelType().intValue() == Model.MODEL_TYPE_BPMN)) {

        // Thumbnail
        generateThumbnailImage(model, jsonNode);

        // Relations
        handleBpmnProcessFormModelRelations(model, jsonNode);
        handleBpmnProcessDecisionTaskModelRelations(model, jsonNode);

      } else if (model.getModelType().intValue() == Model.MODEL_TYPE_FORM ||
          model.getModelType().intValue() == Model.MODEL_TYPE_DECISION_TABLE) {

        // keep the json in sync with the stored name/key
        jsonNode.put("name", model.getName());
        jsonNode.put("key", model.getKey());

      } else if (model.getModelType().intValue() == Model.MODEL_TYPE_APP) {

        handleAppModelProcessRelations(model, jsonNode);
      }
    }

    return model;
  }

  protected ModelHistory persistModelHistory(ModelHistory modelHistory) {
    return modelHistoryRepository.save(modelHistory);
  }

  protected void handleBpmnProcessFormModelRelations(AbstractModel bpmnProcessModel, ObjectNode editorJsonNode) {
    List<JsonNode> formReferenceNodes = JsonConverterUtil.filterOutJsonNodes(JsonConverterUtil.getBpmnProcessModelFormReferences(editorJsonNode));
    Set<String> formIds = JsonConverterUtil.gatherStringPropertyFromJsonNodes(formReferenceNodes, "id");

    handleModelRelations(bpmnProcessModel, formIds, ModelRelationTypes.TYPE_FORM_MODEL_CHILD);
  }

  protected void handleBpmnProcessDecisionTaskModelRelations(AbstractModel bpmnProcessModel, ObjectNode editorJsonNode) {
    List<JsonNode> decisionTableNodes = JsonConverterUtil.filterOutJsonNodes(JsonConverterUtil.getBpmnProcessModelDecisionTableReferences(editorJsonNode));
    Set<String> decisionTableIds = JsonConverterUtil.gatherStringPropertyFromJsonNodes(decisionTableNodes, "id");

    handleModelRelations(bpmnProcessModel, decisionTableIds, ModelRelationTypes.TYPE_DECISION_TABLE_MODEL_CHILD);
  }

  protected void handleAppModelProcessRelations(AbstractModel appModel, ObjectNode appModelJsonNode) {
    Set<String> processModelIds = JsonConverterUtil.getAppModelReferencedModelIds(appModelJsonNode);

    handleModelRelations(appModel, processModelIds, ModelRelationTypes.TYPE_PROCESS_MODEL);
  }

  /**
   * Generic handling of model relations: deleting/adding where needed.
   */
  protected void handleModelRelations(AbstractModel bpmnProcessModel, Set<String> idsReferencedInJson, String relationshipType) {

    // Find existing persisted relations
    List<ModelRelation> persistedModelRelations = modelRelationRepository.findByParentModelIdAndType(bpmnProcessModel.getId(), relationshipType);

    // if no ids referenced now, just delete them all
    if (idsReferencedInJson == null || idsReferencedInJson.size() == 0) {
      modelRelationRepository.delete(persistedModelRelations);
      return;
    }

    Set<String> alreadyPersistedModelIds = new HashSet<String>(persistedModelRelations.size());
    for (ModelRelation persistedModelRelation : persistedModelRelations) {
      if (!idsReferencedInJson.contains(persistedModelRelation.getModelId())) {
        // model used to be referenced, but not anymore. Delete it.
        modelRelationRepository.delete(persistedModelRelation);
      } else {
        alreadyPersistedModelIds.add(persistedModelRelation.getModelId());
      }
    }

    // Loop over all referenced ids and see which one are new
    for (String idReferencedInJson : idsReferencedInJson) {

      // if model is referenced, but it is not yet persisted = create it
      if (!alreadyPersistedModelIds.contains(idReferencedInJson)) {

        // Check if model actually still exists. Don't create the relationship if it doesn't exist.
        // The client UI will have cope with this too.
        if (modelRepository.exists(idReferencedInJson)) {
          modelRelationRepository.save(new ModelRelation(bpmnProcessModel.getId(), idReferencedInJson, relationshipType));
        }
      }
    }
  }

  /**
   * Builds a fresh (unsaved) history snapshot of the given model.
   */
  protected ModelHistory createNewModelhistory(Model model) {
    ModelHistory historyModel = new ModelHistory();
    historyModel.setName(model.getName());
    historyModel.setKey(model.getKey());
    historyModel.setDescription(model.getDescription());
    historyModel.setCreated(model.getCreated());
    historyModel.setLastUpdated(model.getLastUpdated());
    historyModel.setCreatedBy(model.getCreatedBy());
    historyModel.setLastUpdatedBy(model.getLastUpdatedBy());
    historyModel.setModelEditorJson(model.getModelEditorJson());
    historyModel.setModelType(model.getModelType());
    historyModel.setVersion(model.getVersion());
    historyModel.setModelId(model.getId());
    historyModel.setComment(model.getComment());

    return historyModel;
  }

  /**
   * Copies the properties of a history snapshot back onto the live model
   * (the inverse of {@link #createNewModelhistory(Model)}).
   */
  protected void populateModelBasedOnHistory(Model model, ModelHistory basedOn) {
    model.setName(basedOn.getName());
    model.setKey(basedOn.getKey());
    model.setDescription(basedOn.getDescription());
    model.setCreated(basedOn.getCreated());
    model.setLastUpdated(basedOn.getLastUpdated());
    model.setCreatedBy(basedOn.getCreatedBy());
    model.setLastUpdatedBy(basedOn.getLastUpdatedBy());
    model.setModelEditorJson(basedOn.getModelEditorJson());
    model.setModelType(basedOn.getModelType());
    model.setVersion(basedOn.getVersion());
    model.setComment(basedOn.getComment());
  }

  @Override
  public Integer getModelType(String pModelId) {
    // getModel never returns null: it throws NotFoundException for unknown
    // ids, so the former dead null check (with its typo'd message) is gone.
    return getModel(pModelId).getModelType();
  }

  @Override
  public ObjectNode loadJson(String pModelId) {
    Model model = getModel(pModelId); // throws NotFoundException for unknown ids
    try {
      return (ObjectNode) getObjectMapper().readTree(model.getModelEditorJson());
    } catch (IOException e) {
      throw new InternalServerErrorException("Could not load model " + pModelId, e); //$NON-NLS-1$
    }
  }
}
package com.yehyunryu.android.socialweather.utils;

import android.content.ContentValues;
import android.text.TextUtils;
import android.util.Log;

import com.yehyunryu.android.socialweather.BuildConfig;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;

import static com.yehyunryu.android.socialweather.data.WeatherContract.WeatherEntry;

/**
 * Copyright 2017 Yehyun Ryu

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

 http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */
public class NetworkUtils {

    private static final String LOG_TAG = NetworkUtils.class.getSimpleName();

    //base url for weather
    private static final String WEATHER_BASE_URL = "http://api.openweathermap.org/data/2.5/forecast?q=";

    //base url of google places search
    private static final String PLACES_SEARCH_BASE_URL = "https://maps.googleapis.com/maps/api/place/textsearch/json?query=";

    //base url of google places photos
    private static final String PLACES_PHOTOS_BASE_URL = "https://maps.googleapis.com/maps/api/place/photo?maxheight=1200&photoreference=";

    //api key from open weather map, stored in gradle.properties
    private static final String WEATHER_API_KEY = "&appid=" + BuildConfig.OPEN_WEATHER_MAP_API_KEY;

    //api key from google places api, stored in gradle.properties
    private static final String PLACES_API_KEY = "&key=" + BuildConfig.GOOGLE_PLACES_API_KEY;

    //separator used when packing per-timestamp values into a single DB column
    private static final String DELIMITER = "%%%";

    //creates a url for the open weather map forecast endpoint
    private static URL createWeatherUrl(String location) {
        Log.d(LOG_TAG, "createWeatherUrl");

        String stringUrl = WEATHER_BASE_URL + location + WEATHER_API_KEY;
        URL url = null;
        try {
            url = new URL(stringUrl);
        } catch(MalformedURLException e) {
            Log.e(LOG_TAG, "Error creating weather URL: " + e);
        }
        return url;
    }

    //creates a url for the google places text-search endpoint
    private static URL createPlacesUrl(String location) {
        Log.d(LOG_TAG, "createPlacesUrl");

        String stringUrl = PLACES_SEARCH_BASE_URL + location + PLACES_API_KEY;
        URL url = null;
        try {
            url = new URL(stringUrl);
        } catch(MalformedURLException e) {
            Log.e(LOG_TAG, "Error creating places URL: " + e);
        }
        return url;
    }

    //performs a GET request and returns the response body, or "" on any error
    private static String makeHTTPRequest(URL url) throws IOException {
        Log.d(LOG_TAG, "makeHTTPRequest");

        //json response to return
        String jsonResponse = "";

        //return early if url is empty
        if(url == null) {
            return jsonResponse;
        }

        //google places api returns an error response of 400 if there is a space in the query,
        //so percent-encode spaces. (Was previously "&20", which is not valid percent-encoding
        //and corrupted the query string.)
        String stringUrl = url.toString();
        if(stringUrl.contains(" ")) {
            stringUrl = stringUrl.replace(" ", "%20");
            url = new URL(stringUrl);
        }

        HttpURLConnection connection = null;
        InputStream inputStream = null;
        try {
            //establishes connection
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            connection.setConnectTimeout(15000);
            connection.setReadTimeout(10000);
            connection.connect();

            if(connection.getResponseCode() == 200) {
                inputStream = connection.getInputStream();
                jsonResponse = readFromStream(inputStream);
            } else {
                Log.e(LOG_TAG, "URL: " + url);
                Log.e(LOG_TAG, "Bad Response Code: " + connection.getResponseCode());
            }
        } catch(Exception e) {
            Log.e(LOG_TAG, "Error making HTTP request: " + e);
        } finally {
            //close resources only; returning from a finally block (as before)
            //silently swallows any pending exception
            if(inputStream != null) inputStream.close();
            if(connection != null) connection.disconnect();
        }
        return jsonResponse;
    }

    //reads from input stream and returns an output of string
    private static String readFromStream(InputStream inputStream) throws IOException {
        Log.d(LOG_TAG, "readFromStream");

        //output string
        StringBuilder output = new StringBuilder();
        if(inputStream != null) {
            InputStreamReader inputStreamReader = new InputStreamReader(inputStream, Charset.forName("UTF-8"));
            BufferedReader reader = new BufferedReader(inputStreamReader);
            String line = reader.readLine();
            while(line != null) {
                output.append(line);
                line = reader.readLine();
            }
        }
        return output.toString();
    }

    //extract forecast weather data into a ContentValues row; each column holds
    //the per-timestamp values joined with DELIMITER. Returns null for an empty response.
    private static ContentValues extractForecastWeatherFromJSON(String jsonResponse) {
        Log.d(LOG_TAG, "extractForecastWeatherFromJSON");

        //return null content value if json response is empty
        if(TextUtils.isEmpty(jsonResponse)) {
            Log.e(LOG_TAG, "Empty json response");
            return null;
        }

        //Content Values to return
        ContentValues contentValues = new ContentValues();

        try {
            JSONObject baseJson = new JSONObject(jsonResponse);
            JSONArray list = baseJson.getJSONArray("list");

            //collect per-timestamp values; lists + join avoid the O(n^2) string
            //concatenation and the crash on an empty "list" array
            List<String> timeStamps = new ArrayList<>();
            List<String> weatherIds = new ArrayList<>();
            List<String> weatherDescriptions = new ArrayList<>();
            List<String> minTemps = new ArrayList<>();
            List<String> maxTemps = new ArrayList<>();
            List<String> pressures = new ArrayList<>();
            List<String> humidities = new ArrayList<>();
            List<String> windSpeeds = new ArrayList<>();

            for(int i = 0; i < list.length(); i++) {
                JSONObject listItem = list.getJSONObject(i);

                //get timestamp
                long time = listItem.getLong("dt");
                timeStamps.add(String.valueOf(time));

                //get weather id and description
                JSONArray weatherArray = listItem.getJSONArray("weather");
                JSONObject weather = weatherArray.getJSONObject(0);
                int weatherId = weather.getInt("id");
                String description = weather.getString("description");
                weatherIds.add(String.valueOf(weatherId));
                weatherDescriptions.add(description);

                //get min max temp, pressure, and humidity
                JSONObject main = listItem.getJSONObject("main");
                double minTemp = main.getDouble("temp_min");
                double maxTemp = main.getDouble("temp_max");
                double pressure = main.getDouble("pressure");
                int humidity = main.getInt("humidity");
                minTemps.add(String.valueOf(minTemp));
                maxTemps.add(String.valueOf(maxTemp));
                pressures.add(String.valueOf(pressure));
                humidities.add(String.valueOf(humidity));

                //get wind speed
                JSONObject wind = listItem.getJSONObject("wind");
                double windSpeed = wind.getDouble("speed");
                windSpeeds.add(String.valueOf(windSpeed));
            }

            //put data into content values
            contentValues.put(WeatherEntry.COLUMN_FORECAST_WEATHER_TIMES, TextUtils.join(DELIMITER, timeStamps));
            contentValues.put(WeatherEntry.COLUMN_FORECAST_WEATHER_IDS, TextUtils.join(DELIMITER, weatherIds));
            contentValues.put(WeatherEntry.COLUMN_FORECAST_WEATHER_DESCRIPTIONS, TextUtils.join(DELIMITER, weatherDescriptions));
            contentValues.put(WeatherEntry.COLUMN_FORECAST_WEATHER_MIN_TEMPS, TextUtils.join(DELIMITER, minTemps));
            contentValues.put(WeatherEntry.COLUMN_FORECAST_WEATHER_MAX_TEMPS, TextUtils.join(DELIMITER, maxTemps));
            contentValues.put(WeatherEntry.COLUMN_FORECAST_WEATHER_PRESSURES, TextUtils.join(DELIMITER, pressures));
            contentValues.put(WeatherEntry.COLUMN_FORECAST_WEATHER_HUMIDITIES, TextUtils.join(DELIMITER, humidities));
            contentValues.put(WeatherEntry.COLUMN_FORECAST_WEATHER_WIND_SPEEDS, TextUtils.join(DELIMITER, windSpeeds));

        } catch(JSONException e) {
            Log.e(LOG_TAG, "Error extracting forecast from JSON: " + e);
        }

        return contentValues;
    }

    //extract the first place photo url from a places text-search response
    private static String extractPlacesFromJSON(String jsonResponse) {
        Log.d(LOG_TAG, "extractPlacesFromJSON");

        //return empty string if json response is empty
        if(TextUtils.isEmpty(jsonResponse)) {
            return "location_photo_empty"; //linked to strings.xml and default value in WeatherDbHelper
        }

        //photo url to return
        String photoUrl = "location_photo_empty"; //linked to strings.xml and default value in WeatherDbHelper

        try {
            JSONObject baseJson = new JSONObject(jsonResponse);

            //check status
            String status = baseJson.getString("status");
            Log.d(LOG_TAG, "Place Query Status: " + status);
            if(!status.equals("OK")) {
                return photoUrl;
            }

            //get results
            JSONArray results = baseJson.getJSONArray("results");
            if(results.length() == 0) { //check if there are results
                return photoUrl;
            }

            //get photo
            JSONObject result = results.getJSONObject(0);
            if(result.isNull("photos")) { //check if there are photos
                return photoUrl;
            }
            JSONArray photos = result.getJSONArray("photos");
            JSONObject photo = photos.getJSONObject(0);

            //extract photo reference to build photo url
            String photoReference = photo.getString("photo_reference");
            photoUrl = PLACES_PHOTOS_BASE_URL + photoReference + PLACES_API_KEY;

            //for debugging
            Log.d(LOG_TAG, photoUrl);

        } catch(JSONException e) {
            Log.e(LOG_TAG, "Error extracting places from JSON: " + e);
        }

        return photoUrl;
    }

    //returns content value of weather with location string
    public static ContentValues fetchWeather(String location) {
        Log.d(LOG_TAG, "fetchWeather");

        //create url
        URL forecastUrl = createWeatherUrl(location);

        //get json response
        String forecastJsonResponse = "";
        try {
            forecastJsonResponse = makeHTTPRequest(forecastUrl);
        } catch(IOException e) {
            Log.e(LOG_TAG, "Error with HTTP request: " + e);
        }

        //get content values from json response
        return extractForecastWeatherFromJSON(forecastJsonResponse);
    }

    //returns content value of place photo
    public static String fetchPhoto(String location) {
        Log.d(LOG_TAG, "fetchPhoto");

        //create url
        URL url = createPlacesUrl(location);

        //get json response
        String jsonResponse = "";
        try {
            jsonResponse = makeHTTPRequest(url);
        } catch(IOException e) {
            Log.e(LOG_TAG, "Error with HTTP request: " + e);
        }

        //get content values from json response
        return extractPlacesFromJSON(jsonResponse);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.state;

import org.apache.flink.runtime.checkpoint.metadata.CheckpointTestUtils;

import org.junit.Test;

import java.util.Map;
import java.util.Random;
import java.util.UUID;

import static org.junit.Assert.fail;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.powermock.api.mockito.PowerMockito.spy;

/**
 * Tests for the discard/registration life-cycle of {@link IncrementalRemoteKeyedStateHandle}:
 * all nested state handles are Mockito spies, so each test asserts the exact number of
 * {@code discardState()} / registry calls triggered by the handle under test.
 */
public class IncrementalRemoteKeyedStateHandleTest {

    /**
     * This test checks, that for an unregistered {@link IncrementalRemoteKeyedStateHandle} all state
     * (including shared) is discarded.
     */
    @Test
    public void testUnregisteredDiscarding() throws Exception {
        IncrementalRemoteKeyedStateHandle stateHandle = create(new Random(42));

        stateHandle.discardState();

        // Without a shared-state registry, discard must cascade to every nested handle.
        for (StreamStateHandle handle : stateHandle.getPrivateState().values()) {
            verify(handle).discardState();
        }

        for (StreamStateHandle handle : stateHandle.getSharedState().values()) {
            verify(handle).discardState();
        }

        verify(stateHandle.getMetaStateHandle()).discardState();
    }

    /**
     * This test checks, that for a registered {@link IncrementalRemoteKeyedStateHandle} discards respect
     * all shared state and only discard it once all references are released.
     */
    @Test
    public void testSharedStateDeRegistration() throws Exception {

        SharedStateRegistry registry = spy(new SharedStateRegistry());

        // Create two state handles with overlapping shared state
        // (same Random seed => same state handle IDs, so the shared entries collide).
        IncrementalRemoteKeyedStateHandle stateHandle1 = create(new Random(42));
        IncrementalRemoteKeyedStateHandle stateHandle2 = create(new Random(42));

        // Both handles should not be registered and not discarded by now.
        for (Map.Entry<StateHandleID, StreamStateHandle> entry : stateHandle1.getSharedState().entrySet()) {
            SharedStateRegistryKey registryKey = stateHandle1.createSharedStateRegistryKeyFromFileName(entry.getKey());
            verify(registry, times(0)).unregisterReference(registryKey);
            verify(entry.getValue(), times(0)).discardState();
        }

        for (Map.Entry<StateHandleID, StreamStateHandle> entry : stateHandle2.getSharedState().entrySet()) {
            // NOTE(review): key is built via stateHandle1 here as well — works because both handles
            // produce the same registry keys, but presumably stateHandle2 was intended.
            SharedStateRegistryKey registryKey = stateHandle1.createSharedStateRegistryKeyFromFileName(entry.getKey());
            verify(registry, times(0)).unregisterReference(registryKey);
            verify(entry.getValue(), times(0)).discardState();
        }

        // Now we register both ...
        stateHandle1.registerSharedStates(registry);
        stateHandle2.registerSharedStates(registry);

        for (Map.Entry<StateHandleID, StreamStateHandle> stateHandleEntry : stateHandle1.getSharedState().entrySet()) {
            SharedStateRegistryKey registryKey = stateHandle1.createSharedStateRegistryKeyFromFileName(stateHandleEntry.getKey());
            verify(registry).registerReference(
                registryKey,
                stateHandleEntry.getValue());
        }

        for (Map.Entry<StateHandleID, StreamStateHandle> stateHandleEntry : stateHandle2.getSharedState().entrySet()) {
            SharedStateRegistryKey registryKey = stateHandle1.createSharedStateRegistryKeyFromFileName(stateHandleEntry.getKey());
            verify(registry).registerReference(
                registryKey,
                stateHandleEntry.getValue());
        }

        // We discard the first
        stateHandle1.discardState();

        // Should be unregistered, non-shared discarded, shared not discarded
        for (Map.Entry<StateHandleID, StreamStateHandle> entry : stateHandle1.getSharedState().entrySet()) {
            SharedStateRegistryKey registryKey = stateHandle1.createSharedStateRegistryKeyFromFileName(entry.getKey());
            verify(registry, times(1)).unregisterReference(registryKey);
            verify(entry.getValue(), times(0)).discardState();
        }

        for (StreamStateHandle handle : stateHandle2.getSharedState().values()) {
            verify(handle, times(0)).discardState();
        }

        for (Map.Entry<StateHandleID, StreamStateHandle> handleEntry : stateHandle1.getPrivateState().entrySet()) {
            SharedStateRegistryKey registryKey = stateHandle1.createSharedStateRegistryKeyFromFileName(handleEntry.getKey());
            verify(registry, times(0)).unregisterReference(registryKey);
            verify(handleEntry.getValue(), times(1)).discardState();
        }

        for (Map.Entry<StateHandleID, StreamStateHandle> handleEntry : stateHandle2.getPrivateState().entrySet()) {
            SharedStateRegistryKey registryKey = stateHandle1.createSharedStateRegistryKeyFromFileName(handleEntry.getKey());
            verify(registry, times(0)).unregisterReference(registryKey);
            verify(handleEntry.getValue(), times(0)).discardState();
        }

        verify(stateHandle1.getMetaStateHandle(), times(1)).discardState();
        verify(stateHandle2.getMetaStateHandle(), times(0)).discardState();

        // We discard the second
        stateHandle2.discardState();

        // Now everything should be unregistered and discarded
        // (times(2) on the registry: once per handle's discard).
        for (Map.Entry<StateHandleID, StreamStateHandle> entry : stateHandle1.getSharedState().entrySet()) {
            SharedStateRegistryKey registryKey = stateHandle1.createSharedStateRegistryKeyFromFileName(entry.getKey());
            verify(registry, times(2)).unregisterReference(registryKey);
            verify(entry.getValue()).discardState();
        }

        for (Map.Entry<StateHandleID, StreamStateHandle> entry : stateHandle2.getSharedState().entrySet()) {
            SharedStateRegistryKey registryKey = stateHandle1.createSharedStateRegistryKeyFromFileName(entry.getKey());
            verify(registry, times(2)).unregisterReference(registryKey);
            verify(entry.getValue()).discardState();
        }

        verify(stateHandle1.getMetaStateHandle(), times(1)).discardState();
        verify(stateHandle2.getMetaStateHandle(), times(1)).discardState();
    }

    /**
     * This tests that re-registration of shared state with another registry works as expected. This simulates a
     * recovery from a checkpoint, when the checkpoint coordinator creates a new shared state registry and re-registers
     * all live checkpoint states.
     */
    @Test
    public void testSharedStateReRegistration() throws Exception {

        SharedStateRegistry stateRegistryA = spy(new SharedStateRegistry());

        IncrementalRemoteKeyedStateHandle stateHandleX = create(new Random(1));
        IncrementalRemoteKeyedStateHandle stateHandleY = create(new Random(2));
        IncrementalRemoteKeyedStateHandle stateHandleZ = create(new Random(3));

        // Now we register first time ...
        stateHandleX.registerSharedStates(stateRegistryA);
        stateHandleY.registerSharedStates(stateRegistryA);
        stateHandleZ.registerSharedStates(stateRegistryA);

        try {
            // Second attempt should fail
            stateHandleX.registerSharedStates(stateRegistryA);
            fail("Should not be able to register twice with the same registry.");
        } catch (IllegalStateException ignore) {
            // expected: double registration with the same registry is rejected
        }

        // Everything should be discarded for this handle
        stateHandleZ.discardState();
        verify(stateHandleZ.getMetaStateHandle(), times(1)).discardState();

        for (StreamStateHandle stateHandle : stateHandleZ.getSharedState().values()) {
            verify(stateHandle, times(1)).discardState();
        }

        // Close the first registry
        stateRegistryA.close();

        // Attempt to register to closed registry should trigger exception
        try {
            create(new Random(4)).registerSharedStates(stateRegistryA);
            fail("Should not be able to register new state to closed registry.");
        } catch (IllegalStateException ignore) {
            // expected: the registry refuses new registrations after close()
        }

        // All state should still get discarded
        stateHandleY.discardState();
        verify(stateHandleY.getMetaStateHandle(), times(1)).discardState();

        for (StreamStateHandle stateHandle : stateHandleY.getSharedState().values()) {
            verify(stateHandle, times(1)).discardState();
        }

        // This should still be unaffected
        verify(stateHandleX.getMetaStateHandle(), never()).discardState();

        for (StreamStateHandle stateHandle : stateHandleX.getSharedState().values()) {
            verify(stateHandle, never()).discardState();
        }

        // We re-register the handle with a new registry
        SharedStateRegistry sharedStateRegistryB = spy(new SharedStateRegistry());
        stateHandleX.registerSharedStates(sharedStateRegistryB);
        stateHandleX.discardState();

        // Should be completely discarded because it is tracked through the new registry
        verify(stateHandleX.getMetaStateHandle(), times(1)).discardState();

        for (StreamStateHandle stateHandle : stateHandleX.getSharedState().values()) {
            verify(stateHandle, times(1)).discardState();
        }

        sharedStateRegistryB.close();
    }

    /**
     * Builds a handle with randomized private/shared state maps and a dummy meta handle,
     * every nested handle wrapped in a Mockito spy so tests can count discard calls.
     */
    private static IncrementalRemoteKeyedStateHandle create(Random rnd) {
        return new IncrementalRemoteKeyedStateHandle(
            UUID.nameUUIDFromBytes("test".getBytes()),
            KeyGroupRange.of(0, 0),
            1L,
            placeSpies(CheckpointTestUtils.createRandomStateHandleMap(rnd)),
            placeSpies(CheckpointTestUtils.createRandomStateHandleMap(rnd)),
            spy(CheckpointTestUtils.createDummyStreamStateHandle(rnd, null)));
    }

    // Replaces each map value with a Mockito spy of itself, in place.
    private static Map<StateHandleID, StreamStateHandle> placeSpies(
        Map<StateHandleID, StreamStateHandle> map) {

        for (Map.Entry<StateHandleID, StreamStateHandle> entry : map.entrySet()) {
            entry.setValue(spy(entry.getValue()));
        }
        return map;
    }
}
/* * Copyright (C) 2006 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.limit.datepicker.datepicker.simonvt; import android.content.Context; import android.hardware.SensorManager; import android.view.ViewConfiguration; import android.view.animation.AnimationUtils; import android.view.animation.Interpolator; /** * This class encapsulates scrolling. The duration of the scroll * can be passed in the constructor and specifies the maximum time that * the scrolling animation should take. Past this time, the scrolling is * automatically moved to its final stage and computeScrollOffset() * will always return false to indicate that scrolling is over. 
*/ public class Scroller { private int mMode; private int mStartX; private int mStartY; private int mFinalX; private int mFinalY; private int mMinX; private int mMaxX; private int mMinY; private int mMaxY; private int mCurrX; private int mCurrY; private long mStartTime; private int mDuration; private float mDurationReciprocal; private float mDeltaX; private float mDeltaY; private boolean mFinished; private Interpolator mInterpolator; private boolean mFlywheel; private float mVelocity; private static final int DEFAULT_DURATION = 250; private static final int SCROLL_MODE = 0; private static final int FLING_MODE = 1; private static float DECELERATION_RATE = (float) (Math.log(0.75) / Math.log(0.9)); private static float ALPHA = 800; // pixels / seconds private static float START_TENSION = 0.4f; // Tension at start: (0.4 * total T, 1.0 * Distance) private static float END_TENSION = 1.0f - START_TENSION; private static final int NB_SAMPLES = 100; private static final float[] SPLINE = new float[NB_SAMPLES + 1]; private float mDeceleration; private final float mPpi; static { float x_min = 0.0f; for (int i = 0; i <= NB_SAMPLES; i++) { final float t = (float) i / NB_SAMPLES; float x_max = 1.0f; float x, tx, coef; while (true) { x = x_min + (x_max - x_min) / 2.0f; coef = 3.0f * x * (1.0f - x); tx = coef * ((1.0f - x) * START_TENSION + x * END_TENSION) + x * x * x; if (Math.abs(tx - t) < 1E-5) break; if (tx > t) x_max = x; else x_min = x; } final float d = coef + x * x * x; SPLINE[i] = d; } SPLINE[NB_SAMPLES] = 1.0f; // This controls the viscous fluid effect (how much of it) sViscousFluidScale = 8.0f; // must be set to 1.0 (used in viscousFluid()) sViscousFluidNormalize = 1.0f; sViscousFluidNormalize = 1.0f / viscousFluid(1.0f); } private static float sViscousFluidScale; private static float sViscousFluidNormalize; /** * Create a Scroller with the default duration and interpolator. 
*/ public Scroller(Context context) { this(context, null); } /** * Create a Scroller with the specified interpolator. If the interpolator is * null, the default (viscous) interpolator will be used. "Flywheel" behavior will * be in effect for apps targeting Honeycomb or newer. */ public Scroller(Context context, Interpolator interpolator) { this(context, interpolator, true); } /** * Create a Scroller with the specified interpolator. If the interpolator is * null, the default (viscous) interpolator will be used. Specify whether or * not to support progressive "flywheel" behavior in flinging. */ public Scroller(Context context, Interpolator interpolator, boolean flywheel) { mFinished = true; mInterpolator = interpolator; mPpi = context.getResources().getDisplayMetrics().density * 160.0f; mDeceleration = computeDeceleration(ViewConfiguration.getScrollFriction()); mFlywheel = flywheel; } /** * The amount of friction applied to flings. The default value * is {@link ViewConfiguration#getScrollFriction}. * * @param friction A scalar dimension-less value representing the coefficient of * friction. */ public final void setFriction(float friction) { mDeceleration = computeDeceleration(friction); } private float computeDeceleration(float friction) { return SensorManager.GRAVITY_EARTH // g (m/s^2) * 39.37f // inch/meter * mPpi // pixels per inch * friction; } /** * * Returns whether the scroller has finished scrolling. * * @return True if the scroller has finished scrolling, false otherwise. */ public final boolean isFinished() { return mFinished; } /** * Force the finished field to a particular value. * * @param finished The new finished value. */ public final void forceFinished(boolean finished) { mFinished = finished; } /** * Returns how long the scroll event will take, in milliseconds. * * @return The duration of the scroll in milliseconds. */ public final int getDuration() { return mDuration; } /** * Returns the current X offset in the scroll. 
* * @return The new X offset as an absolute distance from the origin. */ public final int getCurrX() { return mCurrX; } /** * Returns the current Y offset in the scroll. * * @return The new Y offset as an absolute distance from the origin. */ public final int getCurrY() { return mCurrY; } /** * Returns the current velocity. * * @return The original velocity less the deceleration. Result may be * negative. */ public float getCurrVelocity() { return mVelocity - mDeceleration * timePassed() / 2000.0f; } /** * Returns the start X offset in the scroll. * * @return The start X offset as an absolute distance from the origin. */ public final int getStartX() { return mStartX; } /** * Returns the start Y offset in the scroll. * * @return The start Y offset as an absolute distance from the origin. */ public final int getStartY() { return mStartY; } /** * Returns where the scroll will end. Valid only for "fling" scrolls. * * @return The final X offset as an absolute distance from the origin. */ public final int getFinalX() { return mFinalX; } /** * Returns where the scroll will end. Valid only for "fling" scrolls. * * @return The final Y offset as an absolute distance from the origin. */ public final int getFinalY() { return mFinalY; } /** * Call this when you want to know the new location. If it returns true, * the animation is not yet finished. loc will be altered to provide the * new location. 
*/ public boolean computeScrollOffset() { if (mFinished) { return false; } int timePassed = (int)(AnimationUtils.currentAnimationTimeMillis() - mStartTime); if (timePassed < mDuration) { switch (mMode) { case SCROLL_MODE: float x = timePassed * mDurationReciprocal; if (mInterpolator == null) x = viscousFluid(x); else x = mInterpolator.getInterpolation(x); mCurrX = mStartX + Math.round(x * mDeltaX); mCurrY = mStartY + Math.round(x * mDeltaY); break; case FLING_MODE: final float t = (float) timePassed / mDuration; final int index = (int) (NB_SAMPLES * t); final float t_inf = (float) index / NB_SAMPLES; final float t_sup = (float) (index + 1) / NB_SAMPLES; final float d_inf = SPLINE[index]; final float d_sup = SPLINE[index + 1]; final float distanceCoef = d_inf + (t - t_inf) / (t_sup - t_inf) * (d_sup - d_inf); mCurrX = mStartX + Math.round(distanceCoef * (mFinalX - mStartX)); // Pin to mMinX <= mCurrX <= mMaxX mCurrX = Math.min(mCurrX, mMaxX); mCurrX = Math.max(mCurrX, mMinX); mCurrY = mStartY + Math.round(distanceCoef * (mFinalY - mStartY)); // Pin to mMinY <= mCurrY <= mMaxY mCurrY = Math.min(mCurrY, mMaxY); mCurrY = Math.max(mCurrY, mMinY); if (mCurrX == mFinalX && mCurrY == mFinalY) { mFinished = true; } break; } } else { mCurrX = mFinalX; mCurrY = mFinalY; mFinished = true; } return true; } /** * Start scrolling by providing a starting point and the distance to travel. * The scroll will use the default value of 250 milliseconds for the * duration. * * @param startX Starting horizontal scroll offset in pixels. Positive * numbers will scroll the content to the left. * @param startY Starting vertical scroll offset in pixels. Positive numbers * will scroll the content up. * @param dx Horizontal distance to travel. Positive numbers will scroll the * content to the left. * @param dy Vertical distance to travel. Positive numbers will scroll the * content up. 
*/ public void startScroll(int startX, int startY, int dx, int dy) { startScroll(startX, startY, dx, dy, DEFAULT_DURATION); } /** * Start scrolling by providing a starting point and the distance to travel. * * @param startX Starting horizontal scroll offset in pixels. Positive * numbers will scroll the content to the left. * @param startY Starting vertical scroll offset in pixels. Positive numbers * will scroll the content up. * @param dx Horizontal distance to travel. Positive numbers will scroll the * content to the left. * @param dy Vertical distance to travel. Positive numbers will scroll the * content up. * @param duration Duration of the scroll in milliseconds. */ public void startScroll(int startX, int startY, int dx, int dy, int duration) { mMode = SCROLL_MODE; mFinished = false; mDuration = duration; mStartTime = AnimationUtils.currentAnimationTimeMillis(); mStartX = startX; mStartY = startY; mFinalX = startX + dx; mFinalY = startY + dy; mDeltaX = dx; mDeltaY = dy; mDurationReciprocal = 1.0f / (float) mDuration; } /** * Start scrolling based on a fling gesture. The distance travelled will * depend on the initial velocity of the fling. * * @param startX Starting point of the scroll (X) * @param startY Starting point of the scroll (Y) * @param velocityX Initial velocity of the fling (X) measured in pixels per * second. * @param velocityY Initial velocity of the fling (Y) measured in pixels per * second * @param minX Minimum X value. The scroller will not scroll past this * point. * @param maxX Maximum X value. The scroller will not scroll past this * point. * @param minY Minimum Y value. The scroller will not scroll past this * point. * @param maxY Maximum Y value. The scroller will not scroll past this * point. 
*/ public void fling(int startX, int startY, int velocityX, int velocityY, int minX, int maxX, int minY, int maxY) { // Continue a scroll or fling in progress if (mFlywheel && !mFinished) { float oldVel = getCurrVelocity(); float dx = (float) (mFinalX - mStartX); float dy = (float) (mFinalY - mStartY); float hyp = (float) Math.sqrt(dx * dx + dy * dy); float ndx = dx / hyp; float ndy = dy / hyp; float oldVelocityX = ndx * oldVel; float oldVelocityY = ndy * oldVel; if (Math.signum(velocityX) == Math.signum(oldVelocityX) && Math.signum(velocityY) == Math.signum(oldVelocityY)) { velocityX += oldVelocityX; velocityY += oldVelocityY; } } mMode = FLING_MODE; mFinished = false; float velocity = (float) Math.sqrt(velocityX * velocityX + velocityY * velocityY); mVelocity = velocity; final double l = Math.log(START_TENSION * velocity / ALPHA); mDuration = (int) (1000.0 * Math.exp(l / (DECELERATION_RATE - 1.0))); mStartTime = AnimationUtils.currentAnimationTimeMillis(); mStartX = startX; mStartY = startY; float coeffX = velocity == 0 ? 1.0f : velocityX / velocity; float coeffY = velocity == 0 ? 1.0f : velocityY / velocity; int totalDistance = (int) (ALPHA * Math.exp(DECELERATION_RATE / (DECELERATION_RATE - 1.0) * l)); mMinX = minX; mMaxX = maxX; mMinY = minY; mMaxY = maxY; mFinalX = startX + Math.round(totalDistance * coeffX); // Pin to mMinX <= mFinalX <= mMaxX mFinalX = Math.min(mFinalX, mMaxX); mFinalX = Math.max(mFinalX, mMinX); mFinalY = startY + Math.round(totalDistance * coeffY); // Pin to mMinY <= mFinalY <= mMaxY mFinalY = Math.min(mFinalY, mMaxY); mFinalY = Math.max(mFinalY, mMinY); } static float viscousFluid(float x) { x *= sViscousFluidScale; if (x < 1.0f) { x -= (1.0f - (float) Math.exp(-x)); } else { float start = 0.36787944117f; // 1/e == exp(-1) x = 1.0f - (float) Math.exp(1.0f - x); x = start + x * (1.0f - start); } x *= sViscousFluidNormalize; return x; } /** * Stops the animation. 
Contrary to {@link #forceFinished(boolean)}, * aborting the animating cause the scroller to move to the final x and y * position * * @see #forceFinished(boolean) */ public void abortAnimation() { mCurrX = mFinalX; mCurrY = mFinalY; mFinished = true; } /** * Extend the scroll animation. This allows a running animation to scroll * further and longer, when used with {@link #setFinalX(int)} or {@link #setFinalY(int)}. * * @param extend Additional time to scroll in milliseconds. * @see #setFinalX(int) * @see #setFinalY(int) */ public void extendDuration(int extend) { int passed = timePassed(); mDuration = passed + extend; mDurationReciprocal = 1.0f / mDuration; mFinished = false; } /** * Returns the time elapsed since the beginning of the scrolling. * * @return The elapsed time in milliseconds. */ public int timePassed() { return (int)(AnimationUtils.currentAnimationTimeMillis() - mStartTime); } /** * Sets the final position (X) for this scroller. * * @param newX The new X offset as an absolute distance from the origin. * @see #extendDuration(int) * @see #setFinalY(int) */ public void setFinalX(int newX) { mFinalX = newX; mDeltaX = mFinalX - mStartX; mFinished = false; } /** * Sets the final position (Y) for this scroller. * * @param newY The new Y offset as an absolute distance from the origin. * @see #extendDuration(int) * @see #setFinalX(int) */ public void setFinalY(int newY) { mFinalY = newY; mDeltaY = mFinalY - mStartY; mFinished = false; } /** * @hide */ public boolean isScrollingInDirection(float xvel, float yvel) { return !mFinished && Math.signum(xvel) == Math.signum(mFinalX - mStartX) && Math.signum(yvel) == Math.signum(mFinalY - mStartY); } }
/*
 * Copyright 2017 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.thoughtworks.go.domain;

import com.thoughtworks.go.config.*;
import com.thoughtworks.go.helper.PipelineConfigMother;
import com.thoughtworks.go.helper.StageConfigMother;
import org.apache.commons.collections.map.SingletonMap;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Matchers;

import java.util.*;

import static com.thoughtworks.go.util.DataStructureUtils.a;
import static com.thoughtworks.go.util.DataStructureUtils.m;
import static com.thoughtworks.go.util.TestUtils.contains;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertTrue;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.*;

/**
 * Unit tests for {@link StageConfig}: attribute population via setConfigAttributes,
 * stage-level authorization, job lookup, and validation behaviour.
 */
public class StageConfigTest {

    private String md5 = "md5-test";

    @Test
    public void shouldSetPrimitiveAttributes() throws Exception{
        StageConfig config = new StageConfig();
        config.setConfigAttributes(new SingletonMap(StageConfig.NAME, "foo_bar"));
        config.setConfigAttributes(new SingletonMap(StageConfig.FETCH_MATERIALS, "0"));
        config.setConfigAttributes(new SingletonMap(StageConfig.CLEAN_WORKING_DIR, "1"));
        assertThat(config.name(), is(new CaseInsensitiveString("foo_bar")));
        assertThat(config.isFetchMaterials(), is(false));
        assertThat(config.isCleanWorkingDir(), is(true));
    }

    @Test
    public void shouldSetArtifactCleanupOptOutAttribute() throws Exception{
        StageConfig config = new StageConfig();
        assertThat(config.isArtifactCleanupProhibited(), is(false));
        config.setConfigAttributes(new SingletonMap(StageConfig.ARTIFACT_CLEANUP_PROHIBITED, "1"));
        assertThat(config.isArtifactCleanupProhibited(), is(true));
        // An attribute map without the key must leave the previous value untouched.
        config.setConfigAttributes(new HashMap());
        assertThat(config.isArtifactCleanupProhibited(), is(true));
        config.setConfigAttributes(new SingletonMap(StageConfig.ARTIFACT_CLEANUP_PROHIBITED, "0"));
        assertThat(config.isArtifactCleanupProhibited(), is(false));
    }

    @Test
    public void shouldRemoveStageLevelAuthorizationWhenInheritingPermissionsFromGroup() {
        StageConfig config = new StageConfig();
        StageConfigMother.addApprovalWithRoles(config, "role1");
        StageConfigMother.addApprovalWithUsers(config, "user1");

        HashMap map = new HashMap();
        List operateUsers = new ArrayList();
        operateUsers.add(nameMap("user1"));
        map.put(StageConfig.OPERATE_USERS, operateUsers);

        List operateRoles = new ArrayList();
        operateRoles.add(nameMap("role1"));
        map.put(StageConfig.OPERATE_ROLES, operateRoles);

        // "inherit" mode must wipe the stage-level auth config even though users/roles are supplied.
        map.put(StageConfig.SECURITY_MODE, "inherit");

        config.setConfigAttributes(map);

        assertThat(config.getApproval().getAuthConfig().isEmpty(), is(true));
    }

    @Test
    public void shouldSetOperateUsers() {
        StageConfig config = new StageConfig();

        HashMap map = new HashMap();
        List operateUsers = new ArrayList();
        // duplicate "user1" on purpose — the config is expected to de-duplicate
        operateUsers.add(nameMap("user1"));
        operateUsers.add(nameMap("user1"));
        operateUsers.add(nameMap("user2"));
        map.put(StageConfig.OPERATE_USERS, operateUsers);
        map.put(StageConfig.OPERATE_ROLES, new ArrayList());
        map.put(StageConfig.SECURITY_MODE, "define");

        config.setConfigAttributes(map);

        assertThat(config.getOperateUsers().size(), is(2));
        assertThat(config.getOperateUsers(), hasItem(new AdminUser(new CaseInsensitiveString("user1"))));
        assertThat(config.getOperateUsers(), hasItem(new AdminUser(new CaseInsensitiveString("user2"))));
    }

    @Test
    public void shouldSetOperateRoles() {
        StageConfig config = new StageConfig();

        HashMap map = new HashMap();
        List operateRoles = new ArrayList();
        // duplicate "role1" on purpose — the config is expected to de-duplicate
        operateRoles.add(nameMap("role1"));
        operateRoles.add(nameMap("role1"));
        operateRoles.add(nameMap("role2"));
        map.put(StageConfig.OPERATE_ROLES, operateRoles);
        map.put(StageConfig.OPERATE_USERS, new ArrayList());
        map.put(StageConfig.SECURITY_MODE, "define");

        config.setConfigAttributes(map);

        assertThat(config.getOperateRoles().size(), is(2));
        assertThat(config.getOperateRoles(), hasItem(new AdminRole(new CaseInsensitiveString("role1"))));
        assertThat(config.getOperateRoles(), hasItem(new AdminRole(new CaseInsensitiveString("role2"))));
    }

    // Builds the single-entry {"name": name} map shape used by the attribute-based setters above.
    private Map nameMap(final String name) {
        Map valueHashMap = new HashMap();
        valueHashMap.put("name", name);
        return valueHashMap;
    }

    @Test
    public void shouldPopulateEnvironmentVariablesFromAttributeMap() {
        StageConfig stageConfig = new StageConfig();
        HashMap map = new HashMap();
        HashMap valueHashMap = new HashMap();
        valueHashMap.put("name", "FOO");
        valueHashMap.put("value", "BAR");
        map.put(StageConfig.ENVIRONMENT_VARIABLES, valueHashMap);
        EnvironmentVariablesConfig mockEnvironmentVariablesConfig = mock(EnvironmentVariablesConfig.class);
        stageConfig.setVariables(mockEnvironmentVariablesConfig);

        stageConfig.setConfigAttributes(map);

        // Delegation check only: the env-vars sub-config receives the raw attribute map.
        verify(mockEnvironmentVariablesConfig).setConfigAttributes(valueHashMap);
    }

    @Test
    public void shouldSetApprovalFromConfigAttrs() throws Exception{
        StageConfig config = new StageConfig();
        config.setConfigAttributes(new SingletonMap(StageConfig.APPROVAL, new SingletonMap(Approval.TYPE, Approval.MANUAL)));
        assertThat(config.getApproval().getType(), is(Approval.MANUAL));
        // An empty attribute map must not reset the approval type.
        config.setConfigAttributes(new HashMap());
        assertThat(config.getApproval().getType(), is(Approval.MANUAL));

        config.setConfigAttributes(new SingletonMap(StageConfig.APPROVAL, new SingletonMap(Approval.TYPE, Approval.SUCCESS)));
        assertThat(config.getApproval().getType(), is(Approval.SUCCESS));
        config.setConfigAttributes(new HashMap());
        assertThat(config.getApproval().getType(), is(Approval.SUCCESS));
    }

    @Test
    public void shouldPickupJobConfigDetailsFromAttributeMap() throws Exception{
        StageConfig config = new StageConfig();
        Map stageAttrs = m(StageConfig.JOBS, a(m(JobConfig.NAME, "con-job"), m(JobConfig.NAME, "boring-job")));
        config.setConfigAttributes(stageAttrs);
        assertThat(config.getJobs().get(0).name(), is(new CaseInsensitiveString("con-job")));
        assertThat(config.getJobs().get(1).name(), is(new CaseInsensitiveString("boring-job")));
    }

    @Test
    public void shouldFindCorrectJobIfJobIsOnAllAgents() throws Exception {
        JobConfig allAgentsJob = new JobConfig("job-for-all-agents");
        allAgentsJob.setRunOnAllAgents(true);

        JobConfigs jobs = new JobConfigs();
        jobs.add(allAgentsJob);
        jobs.add(new JobConfig("job"));

        StageConfig stage = new StageConfig(new CaseInsensitiveString("stage-name"), jobs);

        // Run-on-all-agents jobs get an instance suffix ("-<marker>-1"); lookup must strip it.
        JobConfig found = stage.jobConfigByInstanceName("job-for-all-agents-" + RunOnAllAgentsJobTypeConfig.MARKER + "-1", true);
        assertThat(found, is(allAgentsJob));
    }

    @Test
    public void shouldFindCorrectJobIfJobIsOnAllAgentsAndAmbiguousName() throws Exception {
        JobConfig allAgentsJob = new JobConfig("job-for-all-agents");
        JobConfig ambiguousJob = new JobConfig("job-for-all");

        allAgentsJob.setRunOnAllAgents(true);
        ambiguousJob.setRunOnAllAgents(true);

        JobConfigs jobs = new JobConfigs();
        jobs.add(ambiguousJob);
        jobs.add(allAgentsJob);

        // "job-for-all" is a prefix of "job-for-all-agents" — lookup must still pick the exact match.
        StageConfig stage = new StageConfig(new CaseInsensitiveString("stage-name"), jobs);
        JobConfig found = stage.jobConfigByInstanceName(RunOnAllAgents.CounterBasedJobNameGenerator.appendMarker("job-for-all-agents", 1), true);
        assertThat(found, is(allAgentsJob));
    }

    @Test
    public void shouldReturnTrueIfStageHasTests() {
        StageConfig stageWithTests = StageConfigMother.stageConfigWithArtifact("stage1", "job1", ArtifactType.unit);

        StageConfig stageWithoutTests = StageConfigMother.stageConfigWithArtifact("stage2", "job2", ArtifactType.file);
        assertThat(stageWithTests.hasTests(), is(true));
        assertThat(stageWithoutTests.hasTests(), is(false));
    }

    @Test
    public void shouldPopulateErrorMessagesWhenHasJobNamesRepeated() {
        CruiseConfig config = new BasicCruiseConfig();
        PipelineConfig pipelineConfig = PipelineConfigMother.createPipelineConfig("pipeline", "stage-1", "con-job");
        config.addPipeline("group-foo", pipelineConfig);
        StageConfig stageConfig = pipelineConfig.get(0);

        JobConfig newJob = new JobConfig("foo!");
        StageConfig newlyAddedStage = new StageConfig(new CaseInsensitiveString("."), new JobConfigs(newJob));
        pipelineConfig.addStageWithoutValidityAssertion(newlyAddedStage);

        // Duplicate of the existing "con-job" — added without validation so validateAfterPreprocess catches it.
        stageConfig.getJobs().addJobWithoutValidityAssertion(new JobConfig(new CaseInsensitiveString("con-job"), new ResourceConfigs(), new ArtifactConfigs(), new Tasks(new ExecTask("ls", "-la", "foo"))));

        List<ConfigErrors> allErrors = config.validateAfterPreprocess();
        assertThat(allErrors.size(), is(4));
        assertThat(allErrors.get(0).on(JobConfig.NAME), is("You have defined multiple jobs called 'con-job'. Job names are case-insensitive and must be unique."));
        assertThat(allErrors.get(1).on(JobConfig.NAME), is("You have defined multiple jobs called 'con-job'. Job names are case-insensitive and must be unique."));
        assertThat(allErrors.get(2).on(StageConfig.NAME), is("Invalid stage name '.'. This must be alphanumeric and can contain underscores and periods (however, it cannot start with a period). The maximum allowed length is 255 characters."));
        assertThat(allErrors.get(3).on(JobConfig.NAME), is("Invalid job name 'foo!'. This must be alphanumeric and may contain underscores and periods. The maximum allowed length is 255 characters."));
        assertThat(stageConfig.getJobs().get(0).errors().on(JobConfig.NAME), is("You have defined multiple jobs called 'con-job'. Job names are case-insensitive and must be unique."));
        assertThat(stageConfig.getJobs().get(1).errors().on(JobConfig.NAME), is("You have defined multiple jobs called 'con-job'. Job names are case-insensitive and must be unique."));
        assertThat(newlyAddedStage.errors().on(StageConfig.NAME), is("Invalid stage name '.'. This must be alphanumeric and can contain underscores and periods (however, it cannot start with a period). The maximum allowed length is 255 characters."));
        assertThat(newJob.errors().on(JobConfig.NAME), is("Invalid job name 'foo!'. This must be alphanumeric and may contain underscores and periods. The maximum allowed length is 255 characters."));
    }

    @Test
    public void shouldReturnAllTheUsersAndRoleThatCanOperateThisStage() {
        StageConfig stage = StageConfigMother.stageConfig("stage");
        StageConfigMother.addApprovalWithUsers(stage, "user1", "user2");
        StageConfigMother.addApprovalWithRoles(stage, "role1", "role2");

        assertThat(stage.getOperateUsers(), is(Arrays.asList(new AdminUser(new CaseInsensitiveString("user1")), new AdminUser(new CaseInsensitiveString("user2")))));
        assertThat(stage.getOperateRoles(), is(Arrays.asList(new AdminRole(new CaseInsensitiveString("role1")), new AdminRole(new CaseInsensitiveString("role2")))));
    }

    @Test
    public void shouldFailValidationWhenNameIsBlank(){
        StageConfig stageConfig = new StageConfig();
        stageConfig.validate(null);
        assertThat(stageConfig.errors().on(StageConfig.NAME), contains("Invalid stage name 'null'"));
        stageConfig.setName(null);
        stageConfig.validate(null);
        assertThat(stageConfig.errors().on(StageConfig.NAME), contains("Invalid stage name 'null'"));
        stageConfig.setName(new CaseInsensitiveString(""));
        stageConfig.validate(null);
        assertThat(stageConfig.errors().on(StageConfig.NAME), contains("Invalid stage name 'null'"));
    }

    @Test
    public void shouldValidateTree(){
        EnvironmentVariablesConfig variables = mock(EnvironmentVariablesConfig.class);
        JobConfigs jobConfigs = mock(JobConfigs.class);
        Approval approval = mock(Approval.class);
        StageConfig stageConfig = new StageConfig(new CaseInsensitiveString("stage$"), jobConfigs, approval);
        stageConfig.setVariables(variables);

        stageConfig.validateTree(PipelineConfigSaveValidationContext.forChain(true, "group", new PipelineConfig(), stageConfig));

        assertThat(stageConfig.errors().on(StageConfig.NAME), contains("Invalid stage name 'stage$'"));
        // validateTree must fan out to each child with a context whose parent is this stage.
        ArgumentCaptor<PipelineConfigSaveValidationContext> captor = ArgumentCaptor.forClass(PipelineConfigSaveValidationContext.class);
        verify(jobConfigs).validateTree(captor.capture());
        PipelineConfigSaveValidationContext childContext = captor.getValue();
        assertThat(childContext.getParent(), is(stageConfig));
        verify(approval).validateTree(childContext);
        verify(variables).validateTree(childContext);
    }

    @Test
    public void shouldAddValidateTreeErrorsOnStageConfigIfPipelineIsAssociatedToATemplate(){
        Approval approval = mock(Approval.class);
        JobConfigs jobConfigs = mock(JobConfigs.class);
        ConfigErrors jobErrors = new ConfigErrors();
        jobErrors.add("KEY", "ERROR");
        when(jobConfigs.errors()).thenReturn(jobErrors);
        StageConfig stageConfig = new StageConfig(new CaseInsensitiveString("stage$"), jobConfigs, approval);
        PipelineConfig pipelineConfig = new PipelineConfig();
        pipelineConfig.setTemplateName(new CaseInsensitiveString("template"));

        stageConfig.validateTree(PipelineConfigSaveValidationContext.forChain(true, "group", pipelineConfig, stageConfig));

        assertThat(stageConfig.errors().on(StageConfig.NAME), contains("Invalid stage name 'stage$'"));
    }

    @Test
    public void shouldReturnTrueIfAllDescendentsAreValid(){
        EnvironmentVariablesConfig variables = mock(EnvironmentVariablesConfig.class);
        JobConfigs jobConfigs = mock(JobConfigs.class);
        Approval approval = mock(Approval.class);
        when(variables.validateTree(Matchers.<PipelineConfigSaveValidationContext>any())).thenReturn(true);
        when(jobConfigs.validateTree(Matchers.<PipelineConfigSaveValidationContext>any())).thenReturn(true);
        when(approval.validateTree(Matchers.<PipelineConfigSaveValidationContext>any())).thenReturn(true);

        StageConfig stageConfig = new StageConfig(new CaseInsensitiveString("p1"), jobConfigs);
stageConfig.setVariables(variables); stageConfig.setApproval(approval); boolean isValid = stageConfig.validateTree(PipelineConfigSaveValidationContext.forChain(true, "group", new PipelineConfig(), stageConfig)); assertTrue(isValid); verify(jobConfigs).validateTree(Matchers.<PipelineConfigSaveValidationContext>any()); verify(variables).validateTree(Matchers.<PipelineConfigSaveValidationContext>any()); verify(approval).validateTree(Matchers.<PipelineConfigSaveValidationContext>any()); } @Test public void shouldReturnFalseIfAnyDescendentIsInValid(){ EnvironmentVariablesConfig variables = mock(EnvironmentVariablesConfig.class); JobConfigs jobConfigs = mock(JobConfigs.class); Approval approval = mock(Approval.class); when(variables.validateTree(Matchers.<PipelineConfigSaveValidationContext>any())).thenReturn(false); when(jobConfigs.validateTree(Matchers.<PipelineConfigSaveValidationContext>any())).thenReturn(false); when(approval.validateTree(Matchers.<PipelineConfigSaveValidationContext>any())).thenReturn(false); StageConfig stageConfig = new StageConfig(new CaseInsensitiveString("p1"), jobConfigs); stageConfig.setVariables(variables); stageConfig.setApproval(approval); boolean isValid = stageConfig.validateTree(PipelineConfigSaveValidationContext.forChain(true, "group", new PipelineConfig(), stageConfig)); assertFalse(isValid); verify(jobConfigs).validateTree(Matchers.<PipelineConfigSaveValidationContext>any()); verify(variables).validateTree(Matchers.<PipelineConfigSaveValidationContext>any()); verify(approval).validateTree(Matchers.<PipelineConfigSaveValidationContext>any()); } }
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.idea.maven.wizards; import com.intellij.icons.AllIcons; import com.intellij.ide.util.projectWizard.ModuleBuilder; import com.intellij.ide.util.projectWizard.ModuleWizardStep; import com.intellij.ide.util.projectWizard.SourcePathsBuilder; import com.intellij.ide.util.projectWizard.WizardContext; import com.intellij.openapi.module.JavaModuleType; import com.intellij.openapi.module.ModuleType; import com.intellij.openapi.module.StdModuleTypes; import com.intellij.openapi.options.ConfigurationException; import com.intellij.openapi.project.DumbAwareRunnable; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.JavaSdk; import com.intellij.openapi.projectRoots.SdkTypeId; import com.intellij.openapi.roots.ModifiableRootModel; import com.intellij.openapi.roots.ui.configuration.ModulesProvider; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFile; import icons.MavenIcons; import org.jetbrains.annotations.NotNull; import org.jetbrains.idea.maven.model.MavenArchetype; import org.jetbrains.idea.maven.model.MavenId; import org.jetbrains.idea.maven.project.MavenEnvironmentForm; import org.jetbrains.idea.maven.project.MavenProject; import org.jetbrains.idea.maven.project.MavenProjectsManager; import 
org.jetbrains.idea.maven.utils.MavenUtil;

import javax.swing.*;
import java.io.File;
import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * New-project-wizard module builder that creates a Maven module: it sets up the content
 * root and SDK synchronously, then defers pom generation / archetype instantiation to
 * {@link MavenModuleBuilderHelper} once the project is initialized.
 */
public class MavenModuleBuilder extends ModuleBuilder implements SourcePathsBuilder {
  // Existing Maven project that should list the new module in its <modules> section, if any.
  private MavenProject myAggregatorProject;
  // Existing Maven project to use as <parent> of the new pom, if any.
  private MavenProject myParentProject;

  private boolean myInheritGroupId;
  private boolean myInheritVersion;

  private MavenId myProjectId;
  private MavenArchetype myArchetype;

  private MavenEnvironmentForm myEnvironmentForm;

  private Map<String, String> myPropertiesToCreateByArtifact;

  public void setupRootModel(ModifiableRootModel rootModel) throws ConfigurationException {
    final Project project = rootModel.getProject();

    final VirtualFile root = createAndGetContentEntry();
    rootModel.addContentEntry(root);

    rootModel.inheritSdk();

    // pom creation / archetype generation must wait until the project model is ready,
    // hence the deferred DumbAwareRunnable rather than doing it inline.
    MavenUtil.runWhenInitialized(project, new DumbAwareRunnable() {
      public void run() {
        if (myEnvironmentForm != null) {
          myEnvironmentForm.setData(MavenProjectsManager.getInstance(project).getGeneralSettings());
        }

        new MavenModuleBuilderHelper(myProjectId, myAggregatorProject, myParentProject, myInheritGroupId, myInheritVersion, myArchetype,
                                     myPropertiesToCreateByArtifact, "Create new Maven module").configure(project, root, false);
      }
    });
  }

  @Override
  public String getBuilderId() {
    return getClass().getName();
  }

  @Override
  public String getPresentableName() {
    return "Maven Module";
  }

  @Override
  public String getDescription() {
    return "Maven modules are used for developing <b>JVM-based</b> applications with dependencies managed by <b>Maven</b>. " +
           "You can create either a blank Maven module or a module based on a <b>Maven archetype</b>.";
  }

  @Override
  public Icon getBigIcon() {
    return AllIcons.Modules.Types.JavaModule;
  }

  @Override
  public Icon getNodeIcon() {
    return MavenIcons.MavenLogo;
  }

  public ModuleType getModuleType() {
    return StdModuleTypes.JAVA;
  }

  @Override
  public boolean isSuitableSdkType(SdkTypeId sdk) {
    return sdk == JavaSdk.getInstance();
  }

  @Override
  public ModuleWizardStep[] createWizardSteps(@NotNull WizardContext wizardContext, @NotNull ModulesProvider modulesProvider) {
    return new ModuleWizardStep[]{
      new MavenModuleWizardStep(wizardContext.getProject(), this, wizardContext),
      new SelectPropertiesStep(wizardContext.getProject(), this)
    };
  }

  /**
   * Looks for a pom.xml in the parent directory of the module's content root and resolves it
   * to a known MavenProject; returns null when the project is not mavenized or no parent pom exists.
   * NOTE(review): assumes getContentEntryPath() is non-null at this point — verify against wizard flow.
   */
  public MavenProject findPotentialParentProject(Project project) {
    if (!MavenProjectsManager.getInstance(project).isMavenizedProject()) return null;

    File parentDir = new File(getContentEntryPath()).getParentFile();
    if (parentDir == null) return null;
    VirtualFile parentPom = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(new File(parentDir, "pom.xml"));
    if (parentPom == null) return null;

    return MavenProjectsManager.getInstance(project).findProject(parentPom);
  }

  // Creates the content-root directory on disk and returns its (refreshed) VirtualFile.
  private VirtualFile createAndGetContentEntry() {
    String path = FileUtil.toSystemIndependentName(getContentEntryPath());
    new File(path).mkdirs();
    return LocalFileSystem.getInstance().refreshAndFindFileByPath(path);
  }

  // SourcePathsBuilder contract: source roots are laid out by Maven conventions later,
  // so no explicit source paths are collected here.
  public List<Pair<String, String>> getSourcePaths() {
    return Collections.emptyList();
  }

  public void setSourcePaths(List<Pair<String, String>> sourcePaths) {
  }

  public void addSourcePath(Pair<String, String> sourcePathInfo) {
  }

  public void setAggregatorProject(MavenProject project) {
    myAggregatorProject = project;
  }

  public MavenProject getAggregatorProject() {
    return myAggregatorProject;
  }

  public void setParentProject(MavenProject project) {
    myParentProject = project;
  }

  public MavenProject getParentProject() {
    return myParentProject;
  }

  // Whether the new module inherits groupId/version from its parent project.
  public void setInheritedOptions(boolean groupId, boolean version) {
    myInheritGroupId = groupId;
    myInheritVersion = version;
  }

  public boolean isInheritGroupId() {
    return myInheritGroupId;
  }

  public boolean isInheritVersion() {
    return myInheritVersion;
  }

  public void setProjectId(MavenId id) {
    myProjectId = id;
  }

  public MavenId getProjectId() {
    return myProjectId;
  }

  public void setArchetype(MavenArchetype archetype) {
    myArchetype = archetype;
  }

  public MavenArchetype getArchetype() {
    return myArchetype;
  }

  public MavenEnvironmentForm getEnvironmentForm() {
    return myEnvironmentForm;
  }

  public void setEnvironmentForm(MavenEnvironmentForm environmentForm) {
    myEnvironmentForm = environmentForm;
  }

  public Map<String, String> getPropertiesToCreateByArtifact() {
    return myPropertiesToCreateByArtifact;
  }

  public void setPropertiesToCreateByArtifact(Map<String, String> propertiesToCreateByArtifact) {
    myPropertiesToCreateByArtifact = propertiesToCreateByArtifact;
  }

  @Override
  public String getGroupName() {
    return JavaModuleType.JAVA_GROUP;
  }
}
package org.drools.decisiontable;

import static org.junit.Assert.assertEquals;

import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Serializable;

import org.drools.core.util.FileManager;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.kie.internal.KnowledgeBase;
import org.kie.internal.agent.KnowledgeAgent;
import org.kie.internal.agent.KnowledgeAgentFactory;
import org.kie.internal.io.ResourceChangeScannerConfiguration;
import org.kie.internal.io.ResourceFactory;

/**
 * Integration tests (currently {@code @Ignore}d) reproducing ResourceChangeScanner bugs:
 * the scanner/agent fails to pick up modified CSV/XLS decision tables.
 */
public class ScannerChangeSetTest {
    // Decision tables are written here so they end up on the test classpath.
    public static final String TMP_DIR = "target/classes/";
    FileManager fileManager;

    @Before
    public void setUp() throws Exception {
        fileManager = new FileManager();
        fileManager.setUp();
    }

    @After
    public void tearDown() throws Exception {
        fileManager.tearDown();
    }

    /**
     * Writes a CSV decision table, loads it through a knowledge agent with a 1s change
     * scanner, then rewrites the table and expects the agent to pick up the new rule count.
     * Ignored: the agent fails to load the change (see surefire report).
     */
    @Test
    @Ignore
    public void testCSVByResourceChangeScanner() throws InterruptedException, IOException {
        // load contents of resource decision tables
        String first = fileManager
                .readInputStreamReaderAsString(new InputStreamReader(getClass()
                        .getResourceAsStream("changeSetTestCSV.csv")));
        String second = fileManager
                .readInputStreamReaderAsString(new InputStreamReader(getClass()
                        .getResourceAsStream("changeSetTestCSV2.csv")));

        // write first version of the decision table rules
        File file = new File(TMP_DIR + "scannerChangeSetTestCSV.csv");
        file.delete();
        fileManager.write(file, first);
        // sleeps > 1s so the file's modification timestamp visibly differs between writes
        Thread.sleep(1100);

        // start scanning service with interval 1s
        ResourceChangeScannerConfiguration config = ResourceFactory
                .getResourceChangeScannerService()
                .newResourceChangeScannerConfiguration();
        config.setProperty("drools.resource.scanner.interval", "1");
        ResourceFactory.getResourceChangeScannerService().configure(config);
        ResourceFactory.getResourceChangeNotifierService().start();
        ResourceFactory.getResourceChangeScannerService().start();

        // load knowledge base via knowledge agent
        KnowledgeAgent kagent = KnowledgeAgentFactory
                .newKnowledgeAgent("csv agent");
        kagent.applyChangeSet(ResourceFactory.newClassPathResource(
                "scannerChangeSetTestCSV.xml", getClass()));
        KnowledgeBase kbase = kagent.getKnowledgeBase();
        assertEquals(1, kbase.getKnowledgePackages().size());
        assertEquals(3, kbase.getKnowledgePackages().iterator().next()
                .getRules().size());

        // after some waiting we change number of rules in decision table,
        // scanner should notice the change
        Thread.sleep(1100);
        file.delete();
        fileManager.write(file, second);
        Thread.sleep(1100);
        try {
            kbase = kagent.getKnowledgeBase();
            // fails here - see surefire report, knowledge agent fails to load the change
            assertEquals(1, kbase.getKnowledgePackages().size());
            assertEquals(2, kbase.getKnowledgePackages().iterator().next()
                    .getRules().size());
        } finally {
            ResourceFactory.getResourceChangeNotifierService().stop();
            ResourceFactory.getResourceChangeScannerService().stop();
            file.delete();
            kagent.dispose();
        }
    }

    /**
     *
     * Test to reproduce bug - failure of ResourceChangeScanner when trying to scan for XLS resource.
     *
     * May be related to similar ResourceChangeScanner bug, where scanner fails on scanning Guvnor's PKG.
     * https://bugzilla.redhat.com/show_bug.cgi?id=733008
     *
     * Maybe the reason is that both are compiled resources.
     *
     * @version BRMS 5.2.0 ER4
     * @author jsvitak@redhat.com
     *
     */
    @Test
    @Ignore
    public void testXLSByResourceChangeScanner() throws Exception {
        // first file
        File ruleFile = new File(TMP_DIR + "sample.xls");
        copy(getClass().getResourceAsStream("sample.xls"),
                new FileOutputStream(ruleFile));

        // changeset pointing the agent at the XLS decision table on disk
        String XLS_CHANGESET =
                "<change-set xmlns=\"http://drools.org/drools-5.0/change-set\"\n" +
                " xmlns:xs=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
                " xs:schemaLocation=\"http://drools.org/drools-5.0/change-set http://anonsvn.jboss.org/repos/labs/labs/jbossrules/trunk/drools-api/src/main/resources/change-set-1.0.0.xsd\">\n" +
                " <add>\n" +
                " <resource source=\"file:" + ruleFile.getAbsolutePath() + "\" type=\"DTABLE\">\n" +
                " <decisiontable-conf input-type=\"XLS\" worksheet-name=\"Tables\"/>\n" +
                " </resource>" +
                " </add>\n" +
                "</change-set>\n";
        File xlsChangeset = new File(TMP_DIR + "xlsChangeset.xml");
        xlsChangeset.deleteOnExit();
        writeToFile(xlsChangeset, XLS_CHANGESET);

        // scan every second
        ResourceChangeScannerConfiguration config = ResourceFactory.getResourceChangeScannerService().newResourceChangeScannerConfiguration();
        config.setProperty("drools.resource.scanner.interval", "1");
        ResourceFactory.getResourceChangeScannerService().configure(config);

        // create knowledge agent
        KnowledgeAgent kagent = KnowledgeAgentFactory.newKnowledgeAgent("xls agent");
        kagent.applyChangeSet(ResourceFactory.newFileResource(xlsChangeset));
        KnowledgeBase kbase = kagent.getKnowledgeBase();

        // ---------------------------------------------------------------
        // start scanning service - scanner's thread throws exception here
        // ---------------------------------------------------------------
        ResourceFactory.getResourceChangeNotifierService().start();
        ResourceFactory.getResourceChangeScannerService().start();

        assertEquals(2, kbase.getKnowledgePackages().size());
        assertEquals(3, kbase.getKnowledgePackages().iterator().next().getRules().size());

        // sleeping and modifying content
        Thread.sleep(1500);
        ruleFile.delete();
        ruleFile = new File(TMP_DIR + "sample.xls");
        copy(getClass().getResourceAsStream("sample2.xls"),
                new FileOutputStream(ruleFile));
        Thread.sleep(1000);

        // fixed assertEquals argument order: expected value first, consistent with the rest of the file
        assertEquals(1, kbase.getKnowledgePackages().size());
        assertEquals(3, kbase.getKnowledgePackages().iterator().next().getRules().size());

        // there should be just 2 rules now, but scanner didn't notice the change
        kbase = kagent.getKnowledgeBase();
        assertEquals(2, kbase.getKnowledgePackages().size());
        assertEquals(2, kbase.getKnowledgePackages().iterator().next().getRules().size());

        // stop scanning service
        ResourceFactory.getResourceChangeNotifierService().stop();
        ResourceFactory.getResourceChangeScannerService().stop();
        // file could remain and we will see, that it has changed, but scanner didn't register that
        //ruleFile.delete();
        kagent.dispose();
    }

    /**
     * Copies {@code in} to {@code out} and closes both streams.
     * Fixed: the original closed the streams only on success, leaking both file handles
     * when read/write threw; it also stopped on a 0-byte read instead of the -1 EOF marker.
     */
    private static void copy(InputStream in, OutputStream out) throws IOException {
        try {
            byte[] buf = new byte[1024];
            int len;
            // InputStream.read returns -1 at end of stream; 0 is a legal non-EOF return
            while ((len = in.read(buf)) != -1) {
                out.write(buf, 0, len);
            }
        } finally {
            try {
                in.close();
            } finally {
                out.close();
            }
        }
    }

    // Writes the string to the file, always closing the writer.
    private static void writeToFile(File file, String content) throws Exception {
        FileWriter fw = null;
        try {
            fw = new FileWriter(file);
            fw.write(content);
        } finally {
            if (fw != null) fw.close();
        }
    }

    // Simple fact type used by the decision-table rules.
    public static class Person {
        private int id;

        public void setId(int id) {
            this.id = id;
        }

        public int getId() {
            return id;
        }
    }

    // NOTE(review): non-static inner class implementing Serializable — serializing it drags
    // in the enclosing test instance. Left as-is to preserve the existing interface.
    public class Message implements Serializable {
        private static final long serialVersionUID = -7176392345381065685L;

        private String message;

        public Message() {
            message = "";
        }

        public Message(String message) {
            this.message = message;
        }

        public String getMessage() {
            return message;
        }

        public void setMessage(String message) {
            this.message = message;
        }

        @Override
        public String toString() {
            return "org.jboss.qa.drools.domain.Message[message='" + message + "']";
        }
    }
}
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.jetbrains.kotlin.idea.quickfix; import com.intellij.testFramework.TestDataPath; import org.jetbrains.kotlin.test.JUnit3RunnerWithInners; import org.jetbrains.kotlin.test.KotlinTestUtils; import org.jetbrains.kotlin.test.TestMetadata; import org.jetbrains.kotlin.test.TestRoot; import org.junit.runner.RunWith; /** * This class is generated by {@link org.jetbrains.kotlin.testGenerator.generator.TestGenerator}. * DO NOT MODIFY MANUALLY. */ @SuppressWarnings("all") @TestRoot("idea/tests") @TestDataPath("$CONTENT_ROOT") @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/multiModuleQuickFix") public abstract class QuickFixMultiModuleTestGenerated extends AbstractQuickFixMultiModuleTest { @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/multiModuleQuickFix/accessibilityChecker") public static class AccessibilityChecker extends AbstractQuickFixMultiModuleTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTest, this, testDataFilePath); } @TestMetadata("annotationOnClass") public void testAnnotationOnClass() throws Exception { runTest("testData/multiModuleQuickFix/accessibilityChecker/annotationOnClass/"); } @TestMetadata("classPrimaryConstructor") public void testClassPrimaryConstructor() throws Exception { runTest("testData/multiModuleQuickFix/accessibilityChecker/classPrimaryConstructor/"); } @TestMetadata("classSecondaryConstructor") public void testClassSecondaryConstructor() throws Exception { runTest("testData/multiModuleQuickFix/accessibilityChecker/classSecondaryConstructor/"); } @TestMetadata("classUpperBounds") public void testClassUpperBounds() throws Exception { runTest("testData/multiModuleQuickFix/accessibilityChecker/classUpperBounds/"); } @TestMetadata("errorType") public void testErrorType() throws Exception { 
runTest("testData/multiModuleQuickFix/accessibilityChecker/errorType/"); } @TestMetadata("memberFunction") public void testMemberFunction() throws Exception { runTest("testData/multiModuleQuickFix/accessibilityChecker/memberFunction/"); } @TestMetadata("memberFunctionAndNestedClass") public void testMemberFunctionAndNestedClass() throws Exception { runTest("testData/multiModuleQuickFix/accessibilityChecker/memberFunctionAndNestedClass/"); } @TestMetadata("memberFunctionParentType") public void testMemberFunctionParentType() throws Exception { runTest("testData/multiModuleQuickFix/accessibilityChecker/memberFunctionParentType/"); } @TestMetadata("memberFunctionParentType2") public void testMemberFunctionParentType2() throws Exception { runTest("testData/multiModuleQuickFix/accessibilityChecker/memberFunctionParentType2/"); } @TestMetadata("nestedClassWithTypeParam") public void testNestedClassWithTypeParam() throws Exception { runTest("testData/multiModuleQuickFix/accessibilityChecker/nestedClassWithTypeParam/"); } @TestMetadata("topLevelFunParameter") public void testTopLevelFunParameter() throws Exception { runTest("testData/multiModuleQuickFix/accessibilityChecker/topLevelFunParameter/"); } @TestMetadata("topLevelFunReturnType") public void testTopLevelFunReturnType() throws Exception { runTest("testData/multiModuleQuickFix/accessibilityChecker/topLevelFunReturnType/"); } @TestMetadata("topLevelFunTypeParameter") public void testTopLevelFunTypeParameter() throws Exception { runTest("testData/multiModuleQuickFix/accessibilityChecker/topLevelFunTypeParameter/"); } @TestMetadata("topLevelFunUpperBounds") public void testTopLevelFunUpperBounds() throws Exception { runTest("testData/multiModuleQuickFix/accessibilityChecker/topLevelFunUpperBounds/"); } @TestMetadata("topLevelFunUpperBounds2") public void testTopLevelFunUpperBounds2() throws Exception { runTest("testData/multiModuleQuickFix/accessibilityChecker/topLevelFunUpperBounds2/"); } 
@TestMetadata("topLevelProperty") public void testTopLevelProperty() throws Exception { runTest("testData/multiModuleQuickFix/accessibilityChecker/topLevelProperty/"); } @TestMetadata("topLevelPropertyTypeParam2") public void testTopLevelPropertyTypeParam2() throws Exception { runTest("testData/multiModuleQuickFix/accessibilityChecker/topLevelPropertyTypeParam2/"); } @TestMetadata("topLevelPropertyTypeParamBound") public void testTopLevelPropertyTypeParamBound() throws Exception { runTest("testData/multiModuleQuickFix/accessibilityChecker/topLevelPropertyTypeParamBound/"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/multiModuleQuickFix/addMissingActualMembers") public static class AddMissingActualMembers extends AbstractQuickFixMultiModuleTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTest, this, testDataFilePath); } @TestMetadata("classFunction") public void testClassFunction() throws Exception { runTest("testData/multiModuleQuickFix/addMissingActualMembers/classFunction/"); } @TestMetadata("classFunctionSameSignature") public void testClassFunctionSameSignature() throws Exception { runTest("testData/multiModuleQuickFix/addMissingActualMembers/classFunctionSameSignature/"); } @TestMetadata("classFunctionWithConstructor") public void testClassFunctionWithConstructor() throws Exception { runTest("testData/multiModuleQuickFix/addMissingActualMembers/classFunctionWithConstructor/"); } @TestMetadata("classFunctionWithConstructorAndParameters") public void testClassFunctionWithConstructorAndParameters() throws Exception { runTest("testData/multiModuleQuickFix/addMissingActualMembers/classFunctionWithConstructorAndParameters/"); } @TestMetadata("classFunctionWithIncompatibleConstructor") public void testClassFunctionWithIncompatibleConstructor() throws Exception { runTest("testData/multiModuleQuickFix/addMissingActualMembers/classFunctionWithIncompatibleConstructor/"); } 
@TestMetadata("classOverloadedFunction") public void testClassOverloadedFunction() throws Exception { runTest("testData/multiModuleQuickFix/addMissingActualMembers/classOverloadedFunction/"); } @TestMetadata("classPropertyInConstructor") public void testClassPropertyInConstructor() throws Exception { runTest("testData/multiModuleQuickFix/addMissingActualMembers/classPropertyInConstructor/"); } @TestMetadata("classSomeProperties") public void testClassSomeProperties() throws Exception { runTest("testData/multiModuleQuickFix/addMissingActualMembers/classSomeProperties/"); } @TestMetadata("classWithIncompilableFunction") public void testClassWithIncompilableFunction() throws Exception { runTest("testData/multiModuleQuickFix/addMissingActualMembers/classWithIncompilableFunction/"); } @TestMetadata("companionAbsence") public void testCompanionAbsence() throws Exception { runTest("testData/multiModuleQuickFix/addMissingActualMembers/companionAbsence/"); } @TestMetadata("membersWithIncorrectType") public void testMembersWithIncorrectType() throws Exception { runTest("testData/multiModuleQuickFix/addMissingActualMembers/membersWithIncorrectType/"); } @TestMetadata("primaryConstructorAbsence") public void testPrimaryConstructorAbsence() throws Exception { runTest("testData/multiModuleQuickFix/addMissingActualMembers/primaryConstructorAbsence/"); } @TestMetadata("propertyWithIncorrectType") public void testPropertyWithIncorrectType() throws Exception { runTest("testData/multiModuleQuickFix/addMissingActualMembers/propertyWithIncorrectType/"); } @TestMetadata("secondaryConstructorAbsence") public void testSecondaryConstructorAbsence() throws Exception { runTest("testData/multiModuleQuickFix/addMissingActualMembers/secondaryConstructorAbsence/"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/multiModuleQuickFix/addThrowAnnotation") public static class AddThrowAnnotation extends AbstractQuickFixMultiModuleTest { private void runTest(String testDataFilePath) 
throws Exception { KotlinTestUtils.runTest(this::doTest, this, testDataFilePath); } @TestMetadata("common") public void testCommon() throws Exception { runTest("testData/multiModuleQuickFix/addThrowAnnotation/common/"); } @TestMetadata("js") public void testJs() throws Exception { runTest("testData/multiModuleQuickFix/addThrowAnnotation/js/"); } @TestMetadata("jvm") public void testJvm() throws Exception { runTest("testData/multiModuleQuickFix/addThrowAnnotation/jvm/"); } @TestMetadata("jvmWithoutStdlib") public void testJvmWithoutStdlib() throws Exception { runTest("testData/multiModuleQuickFix/addThrowAnnotation/jvmWithoutStdlib/"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/multiModuleQuickFix/canSealedSubClassBeObject") public static class CanSealedSubClassBeObject extends AbstractQuickFixMultiModuleTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTest, this, testDataFilePath); } @TestMetadata("notConvertActualSubClass") public void testNotConvertActualSubClass() throws Exception { runTest("testData/multiModuleQuickFix/canSealedSubClassBeObject/notConvertActualSubClass/"); } @TestMetadata("notConvertExpectSubClass") public void testNotConvertExpectSubClass() throws Exception { runTest("testData/multiModuleQuickFix/canSealedSubClassBeObject/notConvertExpectSubClass/"); } @TestMetadata("notConvertImplicitExpectSubClass") public void testNotConvertImplicitExpectSubClass() throws Exception { runTest("testData/multiModuleQuickFix/canSealedSubClassBeObject/notConvertImplicitExpectSubClass/"); } @TestMetadata("notGenerateEqualsAndHashCodeForSealedInCommon") public void testNotGenerateEqualsAndHashCodeForSealedInCommon() throws Exception { runTest("testData/multiModuleQuickFix/canSealedSubClassBeObject/notGenerateEqualsAndHashCodeForSealedInCommon/"); } @TestMetadata("notGenerateEqualsAndHashCodeForSealedInJvmForExpect") public void testNotGenerateEqualsAndHashCodeForSealedInJvmForExpect() 
throws Exception { runTest("testData/multiModuleQuickFix/canSealedSubClassBeObject/notGenerateEqualsAndHashCodeForSealedInJvmForExpect/"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/multiModuleQuickFix/changeModifier") public static class ChangeModifier extends AbstractQuickFixMultiModuleTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTest, this, testDataFilePath); } @TestMetadata("internal") public void testInternal() throws Exception { runTest("testData/multiModuleQuickFix/changeModifier/internal/"); } @TestMetadata("public") public void testPublic() throws Exception { runTest("testData/multiModuleQuickFix/changeModifier/public/"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/multiModuleQuickFix/changeSignature") public static class ChangeSignature extends AbstractQuickFixMultiModuleTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTest, this, testDataFilePath); } @TestMetadata("actual") public void testActual() throws Exception { runTest("testData/multiModuleQuickFix/changeSignature/actual/"); } @TestMetadata("expect") public void testExpect() throws Exception { runTest("testData/multiModuleQuickFix/changeSignature/expect/"); } @TestMetadata("override") public void testOverride() throws Exception { runTest("testData/multiModuleQuickFix/changeSignature/override/"); } @TestMetadata("override2") public void testOverride2() throws Exception { runTest("testData/multiModuleQuickFix/changeSignature/override2/"); } @TestMetadata("override3") public void testOverride3() throws Exception { runTest("testData/multiModuleQuickFix/changeSignature/override3/"); } @TestMetadata("override4") public void testOverride4() throws Exception { runTest("testData/multiModuleQuickFix/changeSignature/override4/"); } @TestMetadata("override5") public void testOverride5() throws Exception { 
runTest("testData/multiModuleQuickFix/changeSignature/override5/"); } @TestMetadata("override6") public void testOverride6() throws Exception { runTest("testData/multiModuleQuickFix/changeSignature/override6/"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/multiModuleQuickFix/createActual") public abstract static class CreateActual extends AbstractQuickFixMultiModuleTest { @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/multiModuleQuickFix/createActual") public static class TestBucket001 extends AbstractQuickFixMultiModuleTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTest, this, testDataFilePath); } @TestMetadata("abstract") public void testAbstract() throws Exception { runTest("testData/multiModuleQuickFix/createActual/abstract/"); } @TestMetadata("abstractClassWithJdk") public void testAbstractClassWithJdk() throws Exception { runTest("testData/multiModuleQuickFix/createActual/abstractClassWithJdk/"); } @TestMetadata("annotation") public void testAnnotation() throws Exception { runTest("testData/multiModuleQuickFix/createActual/annotation/"); } @TestMetadata("annotationOptionalExpectation") public void testAnnotationOptionalExpectation() throws Exception { runTest("testData/multiModuleQuickFix/createActual/annotationOptionalExpectation/"); } @TestMetadata("annotationOptionalExpectationNoDir") public void testAnnotationOptionalExpectationNoDir() throws Exception { runTest("testData/multiModuleQuickFix/createActual/annotationOptionalExpectationNoDir/"); } @TestMetadata("annotationWithComment") public void testAnnotationWithComment() throws Exception { runTest("testData/multiModuleQuickFix/createActual/annotationWithComment/"); } @TestMetadata("class") public void testClass() throws Exception { runTest("testData/multiModuleQuickFix/createActual/class/"); } @TestMetadata("classWithBase") public void testClassWithBase() throws Exception { 
runTest("testData/multiModuleQuickFix/createActual/classWithBase/");
}

@TestMetadata("classWithJdk")
public void testClassWithJdk() throws Exception {
    runTest("testData/multiModuleQuickFix/createActual/classWithJdk/");
}

@TestMetadata("classWithSuperTypeFromOtherPackage")
public void testClassWithSuperTypeFromOtherPackage() throws Exception {
    runTest("testData/multiModuleQuickFix/createActual/classWithSuperTypeFromOtherPackage/");
}

@TestMetadata("constructorWithDelegation")
public void testConstructorWithDelegation() throws Exception {
    runTest("testData/multiModuleQuickFix/createActual/constructorWithDelegation/");
}

@TestMetadata("constructorWithJdk")
public void testConstructorWithJdk() throws Exception {
    runTest("testData/multiModuleQuickFix/createActual/constructorWithJdk/");
}

@TestMetadata("createWithImport")
public void testCreateWithImport() throws Exception {
    runTest("testData/multiModuleQuickFix/createActual/createWithImport/");
}

@TestMetadata("defaultParameterInExpected")
public void testDefaultParameterInExpected() throws Exception {
    runTest("testData/multiModuleQuickFix/createActual/defaultParameterInExpected/");
}

@TestMetadata("defaultParameterInExpectedClass")
public void testDefaultParameterInExpectedClass() throws Exception {
    runTest("testData/multiModuleQuickFix/createActual/defaultParameterInExpectedClass/");
}

@TestMetadata("defaultParameterInExpectedConstructor")
public void testDefaultParameterInExpectedConstructor() throws Exception {
    runTest("testData/multiModuleQuickFix/createActual/defaultParameterInExpectedConstructor/");
}

@TestMetadata("enum")
public void testEnum() throws Exception {
    runTest("testData/multiModuleQuickFix/createActual/enum/");
}

@TestMetadata("function")
public void testFunction() throws Exception {
    runTest("testData/multiModuleQuickFix/createActual/function/");
}

@TestMetadata("functionSameFile")
public void testFunctionSameFile() throws Exception {
    runTest("testData/multiModuleQuickFix/createActual/functionSameFile/");
}

@TestMetadata("inlineClass")
public void testInlineClass() throws Exception {
    runTest("testData/multiModuleQuickFix/createActual/inlineClass/");
}
}

// Second bucket of "create actual" quick-fix test cases.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/multiModuleQuickFix/createActual")
public static class TestBucket002 extends AbstractQuickFixMultiModuleTest {
    // Delegates to the common test driver with this instance's doTest.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
    }

    @TestMetadata("interface")
    public void testInterface() throws Exception {
        runTest("testData/multiModuleQuickFix/createActual/interface/");
    }

    @TestMetadata("nested")
    public void testNested() throws Exception {
        runTest("testData/multiModuleQuickFix/createActual/nested/");
    }

    @TestMetadata("object")
    public void testObject() throws Exception {
        runTest("testData/multiModuleQuickFix/createActual/object/");
    }

    @TestMetadata("package")
    public void testPackage() throws Exception {
        runTest("testData/multiModuleQuickFix/createActual/package/");
    }

    @TestMetadata("packageIncorrect")
    public void testPackageIncorrect() throws Exception {
        runTest("testData/multiModuleQuickFix/createActual/packageIncorrect/");
    }

    @TestMetadata("packageIncorrectEmpty")
    public void testPackageIncorrectEmpty() throws Exception {
        runTest("testData/multiModuleQuickFix/createActual/packageIncorrectEmpty/");
    }

    @TestMetadata("primaryConstructor")
    public void testPrimaryConstructor() throws Exception {
        runTest("testData/multiModuleQuickFix/createActual/primaryConstructor/");
    }

    @TestMetadata("property")
    public void testProperty() throws Exception {
        runTest("testData/multiModuleQuickFix/createActual/property/");
    }

    @TestMetadata("sealed")
    public void testSealed() throws Exception {
        runTest("testData/multiModuleQuickFix/createActual/sealed/");
    }

    @TestMetadata("sealedSubclass")
    public void testSealedSubclass() throws Exception {
        runTest("testData/multiModuleQuickFix/createActual/sealedSubclass/");
    }
@TestMetadata("valueClass")
public void testValueClass() throws Exception {
    runTest("testData/multiModuleQuickFix/createActual/valueClass/");
}

@TestMetadata("withFakeJvm")
public void testWithFakeJvm() throws Exception {
    runTest("testData/multiModuleQuickFix/createActual/withFakeJvm/");
}

@TestMetadata("withRootPackage")
public void testWithRootPackage() throws Exception {
    runTest("testData/multiModuleQuickFix/createActual/withRootPackage/");
}

@TestMetadata("withTest")
public void testWithTest() throws Exception {
    runTest("testData/multiModuleQuickFix/createActual/withTest/");
}

@TestMetadata("withTestDummy")
public void testWithTestDummy() throws Exception {
    runTest("testData/multiModuleQuickFix/createActual/withTestDummy/");
}
}
}

// Tests for the "create expect declaration" quick fix, split into buckets of test cases.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/multiModuleQuickFix/createExpect")
public abstract static class CreateExpect extends AbstractQuickFixMultiModuleTest {
    @RunWith(JUnit3RunnerWithInners.class)
    @TestMetadata("testData/multiModuleQuickFix/createExpect")
    public static class TestBucket001 extends AbstractQuickFixMultiModuleTest {
        // Delegates to the common test driver with this instance's doTest.
        private void runTest(String testDataFilePath) throws Exception {
            KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
        }

        @TestMetadata("annotation")
        public void testAnnotation() throws Exception {
            runTest("testData/multiModuleQuickFix/createExpect/annotation/");
        }

        @TestMetadata("annotation2")
        public void testAnnotation2() throws Exception {
            runTest("testData/multiModuleQuickFix/createExpect/annotation2/");
        }

        @TestMetadata("class")
        public void testClass() throws Exception {
            runTest("testData/multiModuleQuickFix/createExpect/class/");
        }

        @TestMetadata("classWithAnnotation")
        public void testClassWithAnnotation() throws Exception {
            runTest("testData/multiModuleQuickFix/createExpect/classWithAnnotation/");
        }

        @TestMetadata("classWithSuperClassAndTypeParameter")
        public void testClassWithSuperClassAndTypeParameter() throws Exception {
            runTest("testData/multiModuleQuickFix/createExpect/classWithSuperClassAndTypeParameter/");
        }

        @TestMetadata("classWithSuperTypeFromOtherPackage")
        public void testClassWithSuperTypeFromOtherPackage() throws Exception {
            runTest("testData/multiModuleQuickFix/createExpect/classWithSuperTypeFromOtherPackage/");
        }

        @TestMetadata("commented")
        public void testCommented() throws Exception {
            runTest("testData/multiModuleQuickFix/createExpect/commented/");
        }

        @TestMetadata("companion")
        public void testCompanion() throws Exception {
            runTest("testData/multiModuleQuickFix/createExpect/companion/");
        }

        @TestMetadata("createWithImport")
        public void testCreateWithImport() throws Exception {
            runTest("testData/multiModuleQuickFix/createExpect/createWithImport/");
        }

        @TestMetadata("dataClass")
        public void testDataClass() throws Exception {
            runTest("testData/multiModuleQuickFix/createExpect/dataClass/");
        }

        @TestMetadata("enum")
        public void testEnum() throws Exception {
            runTest("testData/multiModuleQuickFix/createExpect/enum/");
        }

        @TestMetadata("enumComplex")
        public void testEnumComplex() throws Exception {
            runTest("testData/multiModuleQuickFix/createExpect/enumComplex/");
        }

        @TestMetadata("enumEmpty")
        public void testEnumEmpty() throws Exception {
            runTest("testData/multiModuleQuickFix/createExpect/enumEmpty/");
        }

        @TestMetadata("funWithAccessibleAlias")
        public void testFunWithAccessibleAlias() throws Exception {
            runTest("testData/multiModuleQuickFix/createExpect/funWithAccessibleAlias/");
        }

        @TestMetadata("funWithAccessibleExpansion")
        public void testFunWithAccessibleExpansion() throws Exception {
            runTest("testData/multiModuleQuickFix/createExpect/funWithAccessibleExpansion/");
        }

        @TestMetadata("funWithAccessibleParameter")
        public void testFunWithAccessibleParameter() throws Exception {
            runTest("testData/multiModuleQuickFix/createExpect/funWithAccessibleParameter/");
        }

        @TestMetadata("funWithAccessibleTypeFromCommon")
        public void testFunWithAccessibleTypeFromCommon() throws Exception {
runTest("testData/multiModuleQuickFix/createExpect/funWithAccessibleTypeFromCommon/");
}

@TestMetadata("funWithJdk")
public void testFunWithJdk() throws Exception {
    runTest("testData/multiModuleQuickFix/createExpect/funWithJdk/");
}

@TestMetadata("funWithPrivateModifier")
public void testFunWithPrivateModifier() throws Exception {
    runTest("testData/multiModuleQuickFix/createExpect/funWithPrivateModifier/");
}

@TestMetadata("function")
public void testFunction() throws Exception {
    runTest("testData/multiModuleQuickFix/createExpect/function/");
}
}

// Second bucket of "create expect" quick-fix test cases.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/multiModuleQuickFix/createExpect")
public static class TestBucket002 extends AbstractQuickFixMultiModuleTest {
    // Delegates to the common test driver with this instance's doTest.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
    }

    @TestMetadata("function2")
    public void testFunction2() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/function2/");
    }

    @TestMetadata("functionInInterface")
    public void testFunctionInInterface() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/functionInInterface/");
    }

    @TestMetadata("functionWithImplementationInInterface")
    public void testFunctionWithImplementationInInterface() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/functionWithImplementationInInterface/");
    }

    @TestMetadata("functionWithImplementationInInterface2")
    public void testFunctionWithImplementationInInterface2() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/functionWithImplementationInInterface2/");
    }

    @TestMetadata("hierarchy")
    public void testHierarchy() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/hierarchy/");
    }

    @TestMetadata("inlineClass")
    public void testInlineClass() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/inlineClass/");
    }

    @TestMetadata("inlineClass2")
    public void testInlineClass2() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/inlineClass2/");
    }

    @TestMetadata("innerClassWithTypeParam")
    public void testInnerClassWithTypeParam() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/innerClassWithTypeParam/");
    }

    @TestMetadata("innerClassWithTypeParam2")
    public void testInnerClassWithTypeParam2() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/innerClassWithTypeParam2/");
    }

    @TestMetadata("innerEnum")
    public void testInnerEnum() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/innerEnum/");
    }

    @TestMetadata("memberFunctionAndNestedClass")
    public void testMemberFunctionAndNestedClass() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/memberFunctionAndNestedClass/");
    }

    @TestMetadata("nestedClass")
    public void testNestedClass() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/nestedClass/");
    }

    @TestMetadata("noAccessOnMember")
    public void testNoAccessOnMember() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/noAccessOnMember/");
    }

    @TestMetadata("onMember")
    public void testOnMember() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/onMember/");
    }

    @TestMetadata("primaryConstructor")
    public void testPrimaryConstructor() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/primaryConstructor/");
    }

    @TestMetadata("property")
    public void testProperty() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/property/");
    }

    @TestMetadata("property2")
    public void testProperty2() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/property2/");
    }

    @TestMetadata("propertyInConstructor")
    public void testPropertyInConstructor() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/propertyInConstructor/");
    }

    @TestMetadata("propertyWithConstModifier")
    public void testPropertyWithConstModifier() throws Exception {
runTest("testData/multiModuleQuickFix/createExpect/propertyWithConstModifier/");
}

@TestMetadata("propertyWithLateinitModifier")
public void testPropertyWithLateinitModifier() throws Exception {
    runTest("testData/multiModuleQuickFix/createExpect/propertyWithLateinitModifier/");
}
}

// Third bucket of "create expect" quick-fix test cases.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/multiModuleQuickFix/createExpect")
public static class TestBucket003 extends AbstractQuickFixMultiModuleTest {
    // Delegates to the common test driver with this instance's doTest.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
    }

    @TestMetadata("propertyWithPrivateModifier")
    public void testPropertyWithPrivateModifier() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/propertyWithPrivateModifier/");
    }

    @TestMetadata("sealedClass")
    public void testSealedClass() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/sealedClass/");
    }

    @TestMetadata("stdlibWithJavaAlias")
    public void testStdlibWithJavaAlias() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/stdlibWithJavaAlias/");
    }

    @TestMetadata("superTypeFromStdlib")
    public void testSuperTypeFromStdlib() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/superTypeFromStdlib/");
    }

    @TestMetadata("topLevelFunctionWithAnnotations")
    public void testTopLevelFunctionWithAnnotations() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/topLevelFunctionWithAnnotations/");
    }

    @TestMetadata("topLevelPropertyWithTypeParam")
    public void testTopLevelPropertyWithTypeParam() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/topLevelPropertyWithTypeParam/");
    }

    @TestMetadata("typeAlias")
    public void testTypeAlias() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/typeAlias/");
    }

    @TestMetadata("withAliases")
    public void testWithAliases() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/withAliases/");
    }

    @TestMetadata("withAnnotations")
    public void testWithAnnotations() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/withAnnotations/");
    }

    @TestMetadata("withConstructorWithParametersWithoutValVar")
    public void testWithConstructorWithParametersWithoutValVar() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/withConstructorWithParametersWithoutValVar/");
    }

    @TestMetadata("withInitializer")
    public void testWithInitializer() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/withInitializer/");
    }

    @TestMetadata("withPlatformNested")
    public void testWithPlatformNested() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/withPlatformNested/");
    }

    @TestMetadata("withRootPackage")
    public void testWithRootPackage() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/withRootPackage/");
    }

    @TestMetadata("withSecondaryConstructor")
    public void testWithSecondaryConstructor() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/withSecondaryConstructor/");
    }

    @TestMetadata("withSecondaryConstructor2")
    public void testWithSecondaryConstructor2() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/withSecondaryConstructor2/");
    }

    @TestMetadata("withSupertype")
    public void testWithSupertype() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/withSupertype/");
    }

    @TestMetadata("withVararg")
    public void testWithVararg() throws Exception {
        runTest("testData/multiModuleQuickFix/createExpect/withVararg/");
    }
}
}

// Tests for the quick fixes that repair Kotlin/Native @Throws errors.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/multiModuleQuickFix/fixNativeThrowsErrors")
public static class FixNativeThrowsErrors extends AbstractQuickFixMultiModuleTest {
    // Delegates to the common test driver with this instance's doTest.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
    }

    @TestMetadata("addCancellationException1")
    public void testAddCancellationException1() throws Exception {
runTest("testData/multiModuleQuickFix/fixNativeThrowsErrors/addCancellationException1/");
}

@TestMetadata("addCancellationException2")
public void testAddCancellationException2() throws Exception {
    runTest("testData/multiModuleQuickFix/fixNativeThrowsErrors/addCancellationException2/");
}

@TestMetadata("addCancellationException3")
public void testAddCancellationException3() throws Exception {
    runTest("testData/multiModuleQuickFix/fixNativeThrowsErrors/addCancellationException3/");
}

@TestMetadata("addCancellationException4")
public void testAddCancellationException4() throws Exception {
    runTest("testData/multiModuleQuickFix/fixNativeThrowsErrors/addCancellationException4/");
}

@TestMetadata("removeEmptyThrows")
public void testRemoveEmptyThrows() throws Exception {
    runTest("testData/multiModuleQuickFix/fixNativeThrowsErrors/removeEmptyThrows/");
}

@TestMetadata("removeThrowsOnIncompatibleOverride")
public void testRemoveThrowsOnIncompatibleOverride() throws Exception {
    runTest("testData/multiModuleQuickFix/fixNativeThrowsErrors/removeThrowsOnIncompatibleOverride/");
}
}

// Tests for the "convert function type receiver to parameter" quick fix.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/multiModuleQuickFix/functionTypeReceiverToParameter")
public static class FunctionTypeReceiverToParameter extends AbstractQuickFixMultiModuleTest {
    // Delegates to the common test driver with this instance's doTest.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
    }

    @TestMetadata("classFunctionConstructor")
    public void testClassFunctionConstructor() throws Exception {
        runTest("testData/multiModuleQuickFix/functionTypeReceiverToParameter/classFunctionConstructor/");
    }

    @TestMetadata("classFunctionParameter")
    public void testClassFunctionParameter() throws Exception {
        runTest("testData/multiModuleQuickFix/functionTypeReceiverToParameter/classFunctionParameter/");
    }

    @TestMetadata("classFunctionReturn")
    public void testClassFunctionReturn() throws Exception {
        runTest("testData/multiModuleQuickFix/functionTypeReceiverToParameter/classFunctionReturn/");
    }

    @TestMetadata("classProperty")
    public void testClassProperty() throws Exception {
        runTest("testData/multiModuleQuickFix/functionTypeReceiverToParameter/classProperty/");
    }

    @TestMetadata("functionParameter")
    public void testFunctionParameter() throws Exception {
        runTest("testData/multiModuleQuickFix/functionTypeReceiverToParameter/functionParameter/");
    }

    @TestMetadata("functionReturn")
    public void testFunctionReturn() throws Exception {
        runTest("testData/multiModuleQuickFix/functionTypeReceiverToParameter/functionReturn/");
    }

    @TestMetadata("interfaceFunctionParameter")
    public void testInterfaceFunctionParameter() throws Exception {
        runTest("testData/multiModuleQuickFix/functionTypeReceiverToParameter/interfaceFunctionParameter/");
    }

    @TestMetadata("interfaceFunctionReturn")
    public void testInterfaceFunctionReturn() throws Exception {
        runTest("testData/multiModuleQuickFix/functionTypeReceiverToParameter/interfaceFunctionReturn/");
    }

    @TestMetadata("interfaceProperty")
    public void testInterfaceProperty() throws Exception {
        runTest("testData/multiModuleQuickFix/functionTypeReceiverToParameter/interfaceProperty/");
    }

    @TestMetadata("property")
    public void testProperty() throws Exception {
        runTest("testData/multiModuleQuickFix/functionTypeReceiverToParameter/property/");
    }
}

// Tests for the "initialize property" quick fix.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/multiModuleQuickFix/initializeProperty")
public static class InitializeProperty extends AbstractQuickFixMultiModuleTest {
    // Delegates to the common test driver with this instance's doTest.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
    }

    @TestMetadata("notInitializeNonActualParameterWithConstructorParameter")
    public void testNotInitializeNonActualParameterWithConstructorParameter() throws Exception {
        runTest("testData/multiModuleQuickFix/initializeProperty/notInitializeNonActualParameterWithConstructorParameter/");
    }
@TestMetadata("notInitializeWithConstructorParameter")
public void testNotInitializeWithConstructorParameter() throws Exception {
    runTest("testData/multiModuleQuickFix/initializeProperty/notInitializeWithConstructorParameter/");
}

// "Paramter" below is a typo baked into the test data directory name — do not "fix" it here.
@TestMetadata("notMoveNonActualParamterToActualConstructor")
public void testNotMoveNonActualParamterToActualConstructor() throws Exception {
    runTest("testData/multiModuleQuickFix/initializeProperty/notMoveNonActualParamterToActualConstructor/");
}

@TestMetadata("notMoveToActualConstructor")
public void testNotMoveToActualConstructor() throws Exception {
    runTest("testData/multiModuleQuickFix/initializeProperty/notMoveToActualConstructor/");
}
}

// Tests for the "make overridden member open" quick fix
// ("Overriden" spelling matches the test data directory).
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/multiModuleQuickFix/makeOverridenMemberOpen")
public static class MakeOverridenMemberOpen extends AbstractQuickFixMultiModuleTest {
    // Delegates to the common test driver with this instance's doTest.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
    }

    @TestMetadata("actual")
    public void testActual() throws Exception {
        runTest("testData/multiModuleQuickFix/makeOverridenMemberOpen/actual/");
    }

    @TestMetadata("expect")
    public void testExpect() throws Exception {
        runTest("testData/multiModuleQuickFix/makeOverridenMemberOpen/expect/");
    }

    @TestMetadata("hasAbstract")
    public void testHasAbstract() throws Exception {
        runTest("testData/multiModuleQuickFix/makeOverridenMemberOpen/hasAbstract/");
    }

    @TestMetadata("hasOpen")
    public void testHasOpen() throws Exception {
        runTest("testData/multiModuleQuickFix/makeOverridenMemberOpen/hasOpen/");
    }
}

// Miscellaneous multi-module quick-fix tests, split into buckets of test cases.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/multiModuleQuickFix/other")
public abstract static class Other extends AbstractQuickFixMultiModuleTest {
    @RunWith(JUnit3RunnerWithInners.class)
    @TestMetadata("testData/multiModuleQuickFix/other")
    public static class TestBucket001 extends AbstractQuickFixMultiModuleTest {
        // Delegates to the common test driver with this instance's doTest.
        private void runTest(String testDataFilePath) throws Exception {
            KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
        }

        @TestMetadata("actualImplementAsConstructorParam")
        public void testActualImplementAsConstructorParam() throws Exception {
            runTest("testData/multiModuleQuickFix/other/actualImplementAsConstructorParam/");
        }

        @TestMetadata("actualNoImplementAsConstructorParam")
        public void testActualNoImplementAsConstructorParam() throws Exception {
            runTest("testData/multiModuleQuickFix/other/actualNoImplementAsConstructorParam/");
        }

        @TestMetadata("actualWithoutExpect")
        public void testActualWithoutExpect() throws Exception {
            runTest("testData/multiModuleQuickFix/other/actualWithoutExpect/");
        }

        @TestMetadata("addActualToClass")
        public void testAddActualToClass() throws Exception {
            runTest("testData/multiModuleQuickFix/other/addActualToClass/");
        }

        @TestMetadata("addActualToClassMember")
        public void testAddActualToClassMember() throws Exception {
            runTest("testData/multiModuleQuickFix/other/addActualToClassMember/");
        }

        @TestMetadata("addActualToTopLevelMember")
        public void testAddActualToTopLevelMember() throws Exception {
            runTest("testData/multiModuleQuickFix/other/addActualToTopLevelMember/");
        }

        @TestMetadata("addAnnotationTargetToActual")
        public void testAddAnnotationTargetToActual() throws Exception {
            runTest("testData/multiModuleQuickFix/other/addAnnotationTargetToActual/");
        }

        @TestMetadata("addAnnotationTargetToExpect")
        public void testAddAnnotationTargetToExpect() throws Exception {
            runTest("testData/multiModuleQuickFix/other/addAnnotationTargetToExpect/");
        }

        @TestMetadata("addFunctionToCommonClassFromJavaUsage")
        public void testAddFunctionToCommonClassFromJavaUsage() throws Exception {
            runTest("testData/multiModuleQuickFix/other/addFunctionToCommonClassFromJavaUsage/");
        }

        @TestMetadata("addOperatorByActual")
        public void testAddOperatorByActual() throws Exception {
            runTest("testData/multiModuleQuickFix/other/addOperatorByActual/");
        }

        @TestMetadata("addOperatorByExpect")
        public void testAddOperatorByExpect() throws
Exception {
    runTest("testData/multiModuleQuickFix/other/addOperatorByExpect/");
}

@TestMetadata("cancelMakeAbstractFromActual")
public void testCancelMakeAbstractFromActual() throws Exception {
    runTest("testData/multiModuleQuickFix/other/cancelMakeAbstractFromActual/");
}

@TestMetadata("convertActualEnumToSealedClass")
public void testConvertActualEnumToSealedClass() throws Exception {
    runTest("testData/multiModuleQuickFix/other/convertActualEnumToSealedClass/");
}

@TestMetadata("convertActualSealedClassToEnum")
public void testConvertActualSealedClassToEnum() throws Exception {
    runTest("testData/multiModuleQuickFix/other/convertActualSealedClassToEnum/");
}

@TestMetadata("convertExpectEnumToSealedClass")
public void testConvertExpectEnumToSealedClass() throws Exception {
    runTest("testData/multiModuleQuickFix/other/convertExpectEnumToSealedClass/");
}

@TestMetadata("convertExpectSealedClassToEnum")
public void testConvertExpectSealedClassToEnum() throws Exception {
    runTest("testData/multiModuleQuickFix/other/convertExpectSealedClassToEnum/");
}

@TestMetadata("convertPropertyGetterToInitializer")
public void testConvertPropertyGetterToInitializer() throws Exception {
    runTest("testData/multiModuleQuickFix/other/convertPropertyGetterToInitializer/");
}

@TestMetadata("convertPropertyToFunction")
public void testConvertPropertyToFunction() throws Exception {
    runTest("testData/multiModuleQuickFix/other/convertPropertyToFunction/");
}

@TestMetadata("createClassFromUsageImport")
public void testCreateClassFromUsageImport() throws Exception {
    runTest("testData/multiModuleQuickFix/other/createClassFromUsageImport/");
}

@TestMetadata("createClassFromUsageRef")
public void testCreateClassFromUsageRef() throws Exception {
    runTest("testData/multiModuleQuickFix/other/createClassFromUsageRef/");
}
}

// Second bucket of miscellaneous quick-fix test cases.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/multiModuleQuickFix/other")
public static class TestBucket002 extends AbstractQuickFixMultiModuleTest {
    // Delegates to the common test driver with this instance's doTest.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
    }

    @TestMetadata("createFunInExpectClass")
    public void testCreateFunInExpectClass() throws Exception {
        runTest("testData/multiModuleQuickFix/other/createFunInExpectClass/");
    }

    @TestMetadata("createTestOnExpect")
    public void testCreateTestOnExpect() throws Exception {
        runTest("testData/multiModuleQuickFix/other/createTestOnExpect/");
    }

    @TestMetadata("createValInExpectClass")
    public void testCreateValInExpectClass() throws Exception {
        runTest("testData/multiModuleQuickFix/other/createValInExpectClass/");
    }

    @TestMetadata("createVarInExpectClass")
    public void testCreateVarInExpectClass() throws Exception {
        runTest("testData/multiModuleQuickFix/other/createVarInExpectClass/");
    }

    @TestMetadata("deprecatedHeader")
    public void testDeprecatedHeader() throws Exception {
        runTest("testData/multiModuleQuickFix/other/deprecatedHeader/");
    }

    @TestMetadata("deprecatedHeaderImpl")
    public void testDeprecatedHeaderImpl() throws Exception {
        runTest("testData/multiModuleQuickFix/other/deprecatedHeaderImpl/");
    }

    @TestMetadata("deprecatedImpl")
    public void testDeprecatedImpl() throws Exception {
        runTest("testData/multiModuleQuickFix/other/deprecatedImpl/");
    }

    @TestMetadata("deprecatedImplHeader")
    public void testDeprecatedImplHeader() throws Exception {
        runTest("testData/multiModuleQuickFix/other/deprecatedImplHeader/");
    }

    @TestMetadata("functionTypeParameterToReceiverByHeader")
    public void testFunctionTypeParameterToReceiverByHeader() throws Exception {
        runTest("testData/multiModuleQuickFix/other/functionTypeParameterToReceiverByHeader/");
    }

    @TestMetadata("functionTypeParameterToReceiverByImpl")
    public void testFunctionTypeParameterToReceiverByImpl() throws Exception {
        runTest("testData/multiModuleQuickFix/other/functionTypeParameterToReceiverByImpl/");
    }

    @TestMetadata("functionTypeReceiverToParameterByHeader")
    public void testFunctionTypeReceiverToParameterByHeader() throws Exception {
        runTest("testData/multiModuleQuickFix/other/functionTypeReceiverToParameterByHeader/");
    }

    @TestMetadata("functionTypeReceiverToParameterByImpl")
    public void testFunctionTypeReceiverToParameterByImpl() throws Exception {
        runTest("testData/multiModuleQuickFix/other/functionTypeReceiverToParameterByImpl/");
    }

    @TestMetadata("generateEqualsInExpect")
    public void testGenerateEqualsInExpect() throws Exception {
        runTest("testData/multiModuleQuickFix/other/generateEqualsInExpect/");
    }

    @TestMetadata("generateHashCodeInExpect")
    public void testGenerateHashCodeInExpect() throws Exception {
        runTest("testData/multiModuleQuickFix/other/generateHashCodeInExpect/");
    }

    @TestMetadata("implementAbstractExpectMemberInheritedFromInterface")
    public void testImplementAbstractExpectMemberInheritedFromInterface() throws Exception {
        runTest("testData/multiModuleQuickFix/other/implementAbstractExpectMemberInheritedFromInterface/");
    }

    @TestMetadata("implementMembersInActualClassNoExpectMember")
    public void testImplementMembersInActualClassNoExpectMember() throws Exception {
        runTest("testData/multiModuleQuickFix/other/implementMembersInActualClassNoExpectMember/");
    }

    @TestMetadata("implementMembersInImplClassNonImplInheritor")
    public void testImplementMembersInImplClassNonImplInheritor() throws Exception {
        runTest("testData/multiModuleQuickFix/other/implementMembersInImplClassNonImplInheritor/");
    }

    @TestMetadata("importClassInCommon")
    public void testImportClassInCommon() throws Exception {
        runTest("testData/multiModuleQuickFix/other/importClassInCommon/");
    }

    @TestMetadata("importClassInFromProductionInCommonTests")
    public void testImportClassInFromProductionInCommonTests() throws Exception {
        runTest("testData/multiModuleQuickFix/other/importClassInFromProductionInCommonTests/");
    }

    @TestMetadata("importCommonClassInJs")
    public void testImportCommonClassInJs() throws Exception {
        runTest("testData/multiModuleQuickFix/other/importCommonClassInJs/");
    }
}
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/multiModuleQuickFix/other")
public static class TestBucket003 extends AbstractQuickFixMultiModuleTest {
    /** Delegates to the shared multi-module quick-fix test driver. */
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
    }

    @TestMetadata("importCommonClassInJvm")
    public void testImportCommonClassInJvm() throws Exception {
        runTest("testData/multiModuleQuickFix/other/importCommonClassInJvm/");
    }

    @TestMetadata("importCommonFunInJvm")
    public void testImportCommonFunInJvm() throws Exception {
        runTest("testData/multiModuleQuickFix/other/importCommonFunInJvm/");
    }

    @TestMetadata("importExpectClassWithActualInJvm")
    public void testImportExpectClassWithActualInJvm() throws Exception {
        runTest("testData/multiModuleQuickFix/other/importExpectClassWithActualInJvm/");
    }

    @TestMetadata("importExpectClassWithoutActualInJvm")
    public void testImportExpectClassWithoutActualInJvm() throws Exception {
        runTest("testData/multiModuleQuickFix/other/importExpectClassWithoutActualInJvm/");
    }

    @TestMetadata("importFunInCommon")
    public void testImportFunInCommon() throws Exception {
        runTest("testData/multiModuleQuickFix/other/importFunInCommon/");
    }

    @TestMetadata("makeAbstractFromActual")
    public void testMakeAbstractFromActual() throws Exception {
        runTest("testData/multiModuleQuickFix/other/makeAbstractFromActual/");
    }

    @TestMetadata("makeInlineFromExpect")
    public void testMakeInlineFromExpect() throws Exception {
        runTest("testData/multiModuleQuickFix/other/makeInlineFromExpect/");
    }

    @TestMetadata("makeInternalFromExpect")
    public void testMakeInternalFromExpect() throws Exception {
        runTest("testData/multiModuleQuickFix/other/makeInternalFromExpect/");
    }

    @TestMetadata("makeOpenFromActual")
    public void testMakeOpenFromActual() throws Exception {
        runTest("testData/multiModuleQuickFix/other/makeOpenFromActual/");
    }

    @TestMetadata("makeOpenFromExpect")
    public void testMakeOpenFromExpect() throws Exception {
        runTest("testData/multiModuleQuickFix/other/makeOpenFromExpect/");
    }

    @TestMetadata("mayBeConstantWithActual")
    public void testMayBeConstantWithActual() throws Exception {
        runTest("testData/multiModuleQuickFix/other/mayBeConstantWithActual/");
    }

    @TestMetadata("memberFunParameterToReceiverByHeader")
    public void testMemberFunParameterToReceiverByHeader() throws Exception {
        runTest("testData/multiModuleQuickFix/other/memberFunParameterToReceiverByHeader/");
    }

    @TestMetadata("memberFunParameterToReceiverByImpl")
    public void testMemberFunParameterToReceiverByImpl() throws Exception {
        runTest("testData/multiModuleQuickFix/other/memberFunParameterToReceiverByImpl/");
    }

    @TestMetadata("memberFunReceiverToParameterByHeader")
    public void testMemberFunReceiverToParameterByHeader() throws Exception {
        runTest("testData/multiModuleQuickFix/other/memberFunReceiverToParameterByHeader/");
    }

    @TestMetadata("memberFunReceiverToParameterByImpl")
    public void testMemberFunReceiverToParameterByImpl() throws Exception {
        runTest("testData/multiModuleQuickFix/other/memberFunReceiverToParameterByImpl/");
    }

    @TestMetadata("memberFunToExtensionByHeader")
    public void testMemberFunToExtensionByHeader() throws Exception {
        runTest("testData/multiModuleQuickFix/other/memberFunToExtensionByHeader/");
    }

    @TestMetadata("memberFunToExtensionByImpl")
    public void testMemberFunToExtensionByImpl() throws Exception {
        runTest("testData/multiModuleQuickFix/other/memberFunToExtensionByImpl/");
    }

    @TestMetadata("memberValToExtensionByHeader")
    public void testMemberValToExtensionByHeader() throws Exception {
        runTest("testData/multiModuleQuickFix/other/memberValToExtensionByHeader/");
    }

    @TestMetadata("memberValToExtensionByHeaderWithInapplicableImpl")
    public void testMemberValToExtensionByHeaderWithInapplicableImpl() throws Exception {
        runTest("testData/multiModuleQuickFix/other/memberValToExtensionByHeaderWithInapplicableImpl/");
    }

    @TestMetadata("memberValToExtensionByImpl")
    public void testMemberValToExtensionByImpl() throws Exception {
        runTest("testData/multiModuleQuickFix/other/memberValToExtensionByImpl/");
    }
}

@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/multiModuleQuickFix/other")
public static class TestBucket004 extends AbstractQuickFixMultiModuleTest {
    /** Delegates to the shared multi-module quick-fix test driver. */
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
    }

    @TestMetadata("moveActualPropertyToExistentConstructor")
    public void testMoveActualPropertyToExistentConstructor() throws Exception {
        runTest("testData/multiModuleQuickFix/other/moveActualPropertyToExistentConstructor/");
    }

    @TestMetadata("movePropertyToConstructor")
    public void testMovePropertyToConstructor() throws Exception {
        runTest("testData/multiModuleQuickFix/other/movePropertyToConstructor/");
    }

    @TestMetadata("notMakeAbstractFromActual")
    public void testNotMakeAbstractFromActual() throws Exception {
        runTest("testData/multiModuleQuickFix/other/notMakeAbstractFromActual/");
    }

    @TestMetadata("orderHeader")
    public void testOrderHeader() throws Exception {
        runTest("testData/multiModuleQuickFix/other/orderHeader/");
    }

    @TestMetadata("orderImpl")
    public void testOrderImpl() throws Exception {
        runTest("testData/multiModuleQuickFix/other/orderImpl/");
    }

    @TestMetadata("safeDeleteForbiddenFromActual")
    public void testSafeDeleteForbiddenFromActual() throws Exception {
        runTest("testData/multiModuleQuickFix/other/safeDeleteForbiddenFromActual/");
    }

    @TestMetadata("safeDeleteFromActual")
    public void testSafeDeleteFromActual() throws Exception {
        runTest("testData/multiModuleQuickFix/other/safeDeleteFromActual/");
    }

    @TestMetadata("safeDeleteUsedInAnotherPlatform")
    public void testSafeDeleteUsedInAnotherPlatform() throws Exception {
        runTest("testData/multiModuleQuickFix/other/safeDeleteUsedInAnotherPlatform/");
    }
}
}

@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/multiModuleQuickFix/packageDirectoryMismatch")
public static class PackageDirectoryMismatch extends AbstractQuickFixMultiModuleTest {
    /** Delegates to the shared multi-module quick-fix test driver. */
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
    }

    @TestMetadata("moveFileToAnotherPackage")
    public void testMoveFileToAnotherPackage() throws Exception {
        runTest("testData/multiModuleQuickFix/packageDirectoryMismatch/moveFileToAnotherPackage/");
    }

    @TestMetadata("moveFileToCommonSourceRoot")
    public void testMoveFileToCommonSourceRoot() throws Exception {
        runTest("testData/multiModuleQuickFix/packageDirectoryMismatch/moveFileToCommonSourceRoot/");
    }

    @TestMetadata("moveFileToJvmSourceRoot")
    public void testMoveFileToJvmSourceRoot() throws Exception {
        runTest("testData/multiModuleQuickFix/packageDirectoryMismatch/moveFileToJvmSourceRoot/");
    }
}

@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/multiModuleQuickFix/redundantNullableReturnType")
public static class RedundantNullableReturnType extends AbstractQuickFixMultiModuleTest {
    /** Delegates to the shared multi-module quick-fix test driver. */
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
    }

    @TestMetadata("actualMethod")
    public void testActualMethod() throws Exception {
        runTest("testData/multiModuleQuickFix/redundantNullableReturnType/actualMethod/");
    }

    @TestMetadata("actualTopLevelFunction")
    public void testActualTopLevelFunction() throws Exception {
        runTest("testData/multiModuleQuickFix/redundantNullableReturnType/actualTopLevelFunction/");
    }

    @TestMetadata("actualTopLevelProperty")
    public void testActualTopLevelProperty() throws Exception {
        runTest("testData/multiModuleQuickFix/redundantNullableReturnType/actualTopLevelProperty/");
    }

    @TestMetadata("expectMemberProperty")
    public void testExpectMemberProperty() throws Exception {
        runTest("testData/multiModuleQuickFix/redundantNullableReturnType/expectMemberProperty/");
    }

    @TestMetadata("expectTopLevelFunction")
    public void testExpectTopLevelFunction() throws Exception {
        runTest("testData/multiModuleQuickFix/redundantNullableReturnType/expectTopLevelFunction/");
    }

    @TestMetadata("expectTopLevelProperty")
    public void testExpectTopLevelProperty() throws Exception {
        runTest("testData/multiModuleQuickFix/redundantNullableReturnType/expectTopLevelProperty/");
    }
}
}
/* * Copyright (C) 2009 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.google.common.escape; import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.annotations.GwtCompatible; import java.util.Map; import javax.annotation.CheckForNull; import org.checkerframework.checker.nullness.qual.Nullable; /** * A {@link UnicodeEscaper} that uses an array to quickly look up replacement characters for a given * code point. An additional safe range is provided that determines whether code points without * specific replacements are to be considered safe and left unescaped or should be escaped in a * general way. * * <p>A good example of usage of this class is for HTML escaping where the replacement array * contains information about the named HTML entities such as {@code &amp;} and {@code &quot;} while * {@link #escapeUnsafe} is overridden to handle general escaping of the form {@code &#NNNNN;}. * * <p>The size of the data structure used by {@link ArrayBasedUnicodeEscaper} is proportional to the * highest valued code point that requires escaping. For example a replacement map containing the * single character '{@code \}{@code u1000}' will require approximately 16K of memory. If you need * to create multiple escaper instances that have the same character replacement mapping consider * using {@link ArrayBasedEscaperMap}. 
* * @author David Beaumont * @since 15.0 */ @GwtCompatible @ElementTypesAreNonnullByDefault public abstract class ArrayBasedUnicodeEscaper extends UnicodeEscaper { // The replacement array (see ArrayBasedEscaperMap). private final char[][] replacements; // The number of elements in the replacement array. private final int replacementsLength; // The first code point in the safe range. private final int safeMin; // The last code point in the safe range. private final int safeMax; // Cropped values used in the fast path range checks. private final char safeMinChar; private final char safeMaxChar; /** * Creates a new ArrayBasedUnicodeEscaper instance with the given replacement map and specified * safe range. If {@code safeMax < safeMin} then no code points are considered safe. * * <p>If a code point has no mapped replacement then it is checked against the safe range. If it * lies outside that, then {@link #escapeUnsafe} is called, otherwise no escaping is performed. * * @param replacementMap a map of characters to their escaped representations * @param safeMin the lowest character value in the safe range * @param safeMax the highest character value in the safe range * @param unsafeReplacement the default replacement for unsafe characters or null if no default * replacement is required */ protected ArrayBasedUnicodeEscaper( Map<Character, String> replacementMap, int safeMin, int safeMax, @Nullable String unsafeReplacement) { this(ArrayBasedEscaperMap.create(replacementMap), safeMin, safeMax, unsafeReplacement); } /** * Creates a new ArrayBasedUnicodeEscaper instance with the given replacement map and specified * safe range. If {@code safeMax < safeMin} then no code points are considered safe. This * initializer is useful when explicit instances of ArrayBasedEscaperMap are used to allow the * sharing of large replacement mappings. * * <p>If a code point has no mapped replacement then it is checked against the safe range. 
If it * lies outside that, then {@link #escapeUnsafe} is called, otherwise no escaping is performed. * * @param escaperMap the map of replacements * @param safeMin the lowest character value in the safe range * @param safeMax the highest character value in the safe range * @param unsafeReplacement the default replacement for unsafe characters or null if no default * replacement is required */ protected ArrayBasedUnicodeEscaper( ArrayBasedEscaperMap escaperMap, int safeMin, int safeMax, @Nullable String unsafeReplacement) { checkNotNull(escaperMap); // GWT specific check (do not optimize) this.replacements = escaperMap.getReplacementArray(); this.replacementsLength = replacements.length; if (safeMax < safeMin) { // If the safe range is empty, set the range limits to opposite extremes // to ensure the first test of either value will fail. safeMax = -1; safeMin = Integer.MAX_VALUE; } this.safeMin = safeMin; this.safeMax = safeMax; // This is a bit of a hack but lets us do quicker per-character checks in // the fast path code. The safe min/max values are very unlikely to extend // into the range of surrogate characters, but if they do we must not test // any values in that range. To see why, consider the case where: // safeMin <= {hi,lo} <= safeMax // where {hi,lo} are characters forming a surrogate pair such that: // codePointOf(hi, lo) > safeMax // which would result in the surrogate pair being (wrongly) considered safe. // If we clip the safe range used during the per-character tests so it is // below the values of characters in surrogate pairs, this cannot occur. // This approach does mean that we break out of the fast path code in cases // where we don't strictly need to, but this situation will almost never // occur in practice. if (safeMin >= Character.MIN_HIGH_SURROGATE) { // The safe range is empty or the all safe code points lie in or above the // surrogate range. Either way the character range is empty. 
this.safeMinChar = Character.MAX_VALUE; this.safeMaxChar = 0; } else { // The safe range is non empty and contains values below the surrogate // range but may extend above it. We may need to clip the maximum value. this.safeMinChar = (char) safeMin; this.safeMaxChar = (char) Math.min(safeMax, Character.MIN_HIGH_SURROGATE - 1); } } /* * This is overridden to improve performance. Rough benchmarking shows that this almost doubles * the speed when processing strings that do not require any escaping. */ @Override public final String escape(String s) { checkNotNull(s); // GWT specific check (do not optimize) for (int i = 0; i < s.length(); i++) { char c = s.charAt(i); if ((c < replacementsLength && replacements[c] != null) || c > safeMaxChar || c < safeMinChar) { return escapeSlow(s, i); } } return s; } /** * Escapes a single Unicode code point using the replacement array and safe range values. If the * given character does not have an explicit replacement and lies outside the safe range then * {@link #escapeUnsafe} is called. * * @return the replacement characters, or {@code null} if no escaping was required */ @Override @CheckForNull protected final char[] escape(int cp) { if (cp < replacementsLength) { char[] chars = replacements[cp]; if (chars != null) { return chars; } } if (cp >= safeMin && cp <= safeMax) { return null; } return escapeUnsafe(cp); } /* Overridden for performance. */ @Override protected final int nextEscapeIndex(CharSequence csq, int index, int end) { while (index < end) { char c = csq.charAt(index); if ((c < replacementsLength && replacements[c] != null) || c > safeMaxChar || c < safeMinChar) { break; } index++; } return index; } /** * Escapes a code point that has no direct explicit value in the replacement array and lies * outside the stated safe range. Subclasses should override this method to provide generalized * escaping for code points if required. 
* * <p>Note that arrays returned by this method must not be modified once they have been returned. * However it is acceptable to return the same array multiple times (even for different input * characters). * * @param cp the Unicode code point to escape * @return the replacement characters, or {@code null} if no escaping was required */ @CheckForNull protected abstract char[] escapeUnsafe(int cp); }
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.engine.test.bpmn.event.error; import java.util.HashMap; import java.util.List; import java.util.Map; import org.flowable.engine.common.api.FlowableException; import org.flowable.engine.common.impl.history.HistoryLevel; import org.flowable.engine.common.impl.util.CollectionUtil; import org.flowable.engine.delegate.BpmnError; import org.flowable.engine.history.HistoricProcessInstance; import org.flowable.engine.impl.test.HistoryTestHelper; import org.flowable.engine.impl.test.PluggableFlowableTestCase; import org.flowable.engine.test.Deployment; /** * @author Joram Barrez * @author Tijs Rademakers */ public class BoundaryErrorEventTest extends PluggableFlowableTestCase { @Deployment public void testCatchErrorOnEmbeddedSubprocess() { runtimeService.startProcessInstanceByKey("boundaryErrorOnEmbeddedSubprocess"); // After process start, usertask in subprocess should exist org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult(); assertEquals("subprocessTask", task.getName()); // After task completion, error end event is reached and caught taskService.complete(task.getId()); task = taskService.createTaskQuery().singleResult(); assertEquals("task after catching the error", task.getName()); } public void testThrowErrorWithEmptyErrorCode() { try { 
repositoryService.createDeployment().addClasspathResource("org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.testThrowErrorWithEmptyErrorCode.bpmn20.xml").deploy(); fail("ActivitiException expected"); } catch (FlowableException re) { } } @Deployment public void testCatchErrorOnEmbeddedSubprocessWithEmptyErrorCode() { testCatchErrorOnEmbeddedSubprocess(); } @Deployment public void testCatchErrorOnEmbeddedSubprocessWithoutErrorCode() { testCatchErrorOnEmbeddedSubprocess(); } @Deployment public void testCatchErrorOfInnerSubprocessOnOuterSubprocess() { runtimeService.startProcessInstanceByKey("boundaryErrorTest"); List<org.flowable.task.api.Task> tasks = taskService.createTaskQuery().orderByTaskName().asc().list(); assertEquals(2, tasks.size()); assertEquals("Inner subprocess task 1", tasks.get(0).getName()); assertEquals("Inner subprocess task 2", tasks.get(1).getName()); // Completing task 2, will cause the end error event to throw error with code 123 taskService.complete(tasks.get(1).getId()); taskService.createTaskQuery().list(); org.flowable.task.api.Task taskAfterError = taskService.createTaskQuery().singleResult(); assertEquals("task outside subprocess", taskAfterError.getName()); } @Deployment public void testCatchErrorInConcurrentEmbeddedSubprocesses() { assertErrorCaughtInConcurrentEmbeddedSubprocesses("boundaryEventTestConcurrentSubprocesses"); } @Deployment public void testCatchErrorInConcurrentEmbeddedSubprocessesThrownByScriptTask() { assertErrorCaughtInConcurrentEmbeddedSubprocesses("catchErrorInConcurrentEmbeddedSubprocessesThrownByScriptTask"); } private void assertErrorCaughtInConcurrentEmbeddedSubprocesses(String processDefinitionKey) { // Completing task A will lead to task D String procId = runtimeService.startProcessInstanceByKey(processDefinitionKey).getId(); List<org.flowable.task.api.Task> tasks = taskService.createTaskQuery().orderByTaskName().asc().list(); assertEquals(2, tasks.size()); assertEquals("task A", 
tasks.get(0).getName()); assertEquals("task B", tasks.get(1).getName()); taskService.complete(tasks.get(0).getId()); org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult(); assertEquals("task D", task.getName()); taskService.complete(task.getId()); assertProcessEnded(procId); // Completing task B will lead to task C runtimeService.startProcessInstanceByKey(processDefinitionKey).getId(); tasks = taskService.createTaskQuery().orderByTaskName().asc().list(); assertEquals(2, tasks.size()); assertEquals("task A", tasks.get(0).getName()); assertEquals("task B", tasks.get(1).getName()); taskService.complete(tasks.get(1).getId()); tasks = taskService.createTaskQuery().orderByTaskName().asc().list(); assertEquals(2, tasks.size()); assertEquals("task A", tasks.get(0).getName()); assertEquals("task C", tasks.get(1).getName()); taskService.complete(tasks.get(1).getId()); task = taskService.createTaskQuery().singleResult(); assertEquals("task A", task.getName()); taskService.complete(task.getId()); task = taskService.createTaskQuery().singleResult(); assertEquals("task D", task.getName()); } @Deployment public void testDeeplyNestedErrorThrown() { // Input = 1 -> error1 will be thrown, which will destroy ALL BUT ONE // subprocess, which leads to an end event, which ultimately leads to // ending the process instance String procId = runtimeService.startProcessInstanceByKey("deeplyNestedErrorThrown").getId(); org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult(); assertEquals("Nested task", task.getName()); taskService.complete(task.getId(), CollectionUtil.singletonMap("input", 1)); assertProcessEnded(procId); // Input == 2 -> error2 will be thrown, leading to a userTask outside // all subprocesses procId = runtimeService.startProcessInstanceByKey("deeplyNestedErrorThrown").getId(); task = taskService.createTaskQuery().singleResult(); assertEquals("Nested task", task.getName()); taskService.complete(task.getId(), 
CollectionUtil.singletonMap("input", 2)); task = taskService.createTaskQuery().singleResult(); assertEquals("task after catch", task.getName()); taskService.complete(task.getId()); assertProcessEnded(procId); } @Deployment public void testDeeplyNestedErrorThrownOnlyAutomaticSteps() { // input == 1 -> error2 is thrown -> caught on subprocess2 -> end event // in subprocess -> proc inst end 1 String procId = runtimeService.startProcessInstanceByKey("deeplyNestedErrorThrown", CollectionUtil.singletonMap("input", 1)).getId(); assertProcessEnded(procId); HistoricProcessInstance hip; if (HistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, processEngineConfiguration)) { hip = historyService.createHistoricProcessInstanceQuery().processInstanceId(procId).singleResult(); assertEquals("processEnd1", hip.getEndActivityId()); } // input == 2 -> error2 is thrown -> caught on subprocess1 -> proc inst // end 2 procId = runtimeService.startProcessInstanceByKey("deeplyNestedErrorThrown", CollectionUtil.singletonMap("input", 1)).getId(); assertProcessEnded(procId); if (HistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, processEngineConfiguration)) { hip = historyService.createHistoricProcessInstanceQuery().processInstanceId(procId).singleResult(); assertEquals("processEnd1", hip.getEndActivityId()); } } @Deployment(resources = { "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.testCatchErrorOnCallActivity-parent.bpmn20.xml", "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.subprocess.bpmn20.xml" }) public void testCatchErrorOnCallActivity() { String procId = runtimeService.startProcessInstanceByKey("catchErrorOnCallActivity").getId(); org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult(); assertEquals("Task in subprocess", task.getName()); // Completing the task will reach the end error event, // which is caught on the call activity boundary taskService.complete(task.getId()); task = 
taskService.createTaskQuery().singleResult(); assertEquals("Escalated Task", task.getName()); // Completing the task will end the process instance taskService.complete(task.getId()); assertProcessEnded(procId); } @Deployment(resources = { "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.multipleErrorsCatch.bpmn20.xml", "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.multipleErrorsThrow.bpmn20.xml" }) public void testCatchMultipleErrorsOnCallActivity() { String procId = runtimeService.startProcessInstanceByKey("catchError").getId(); org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult(); assertEquals("specificErrorTask", task.getTaskDefinitionKey()); taskService.complete(task.getId()); assertProcessEnded(procId); } @Deployment(resources = { "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.multipleErrorsCatch.bpmn20.xml", "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.multipleErrorsThrow2.bpmn20.xml" }) public void testCatchMultipleErrorsOnCallActivityNoSpecificError() { String procId = runtimeService.startProcessInstanceByKey("catchError").getId(); org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult(); assertEquals("emptyErrorTask", task.getTaskDefinitionKey()); taskService.complete(task.getId()); assertProcessEnded(procId); } @Deployment(resources = { "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.callActivityWithErrorEndEventCatch.bpmn20.xml", "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.callActivityWithErrorEndEventThrow.bpmn20.xml" }) public void testCatchErrorEndEventOnCallActivity() { String procId = runtimeService.startProcessInstanceByKey("catchError").getId(); org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult(); assertEquals("specificErrorTask", task.getTaskDefinitionKey()); taskService.complete(task.getId()); assertProcessEnded(procId); } @Deployment(resources = { 
"org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.callActivityWithErrorEndEventCatch.bpmn20.xml", "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.callActivityWithErrorEndEventThrow2.bpmn20.xml" }) public void testCatchErrorEndEventOnCallActivityNoSpecificError() { String procId = runtimeService.startProcessInstanceByKey("catchError").getId(); org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult(); assertEquals("emptyErrorTask", task.getTaskDefinitionKey()); taskService.complete(task.getId()); assertProcessEnded(procId); } @Deployment(resources = { "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.subprocess.bpmn20.xml" }) public void testUncaughtError() { runtimeService.startProcessInstanceByKey("simpleSubProcess"); org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult(); assertEquals("Task in subprocess", task.getName()); try { // Completing the task will reach the end error event, which is never caught in the process taskService.complete(task.getId()); fail("No catching boundary event found for error with errorCode 'myError', neither in same process nor in parent process but no Exception is thrown"); } catch (BpmnError e) { assertTextPresent("No catching boundary event found for error with errorCode 'myError', neither in same process nor in parent process", e.getMessage()); } } @Deployment(resources = { "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.testUncaughtErrorOnCallActivity-parent.bpmn20.xml", "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.subprocess.bpmn20.xml" }) public void testUncaughtErrorOnCallActivity() { runtimeService.startProcessInstanceByKey("uncaughtErrorOnCallActivity"); org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult(); assertEquals("Task in subprocess", task.getName()); try { // Completing the task will reach the end error event, // which is never caught in the process 
taskService.complete(task.getId()); } catch (BpmnError e) { assertTextPresent("No catching boundary event found for error with errorCode 'myError', neither in same process nor in parent process", e.getMessage()); } } @Deployment(resources = { "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.testCatchErrorThrownByCallActivityOnSubprocess.bpmn20.xml", "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.subprocess.bpmn20.xml" }) public void testCatchErrorThrownByCallActivityOnSubprocess() { String procId = runtimeService.startProcessInstanceByKey("catchErrorOnSubprocess").getId(); org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult(); assertEquals("Task in subprocess", task.getName()); // Completing the task will reach the end error event, // which is caught on the call activity boundary taskService.complete(task.getId()); task = taskService.createTaskQuery().singleResult(); assertEquals("Escalated Task", task.getName()); // Completing the task will end the process instance taskService.complete(task.getId()); assertProcessEnded(procId); } @Deployment(resources = { "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.testCatchErrorThrownByCallActivityOnCallActivity.bpmn20.xml", "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.subprocess2ndLevel.bpmn20.xml", "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.subprocess.bpmn20.xml" }) public void testCatchErrorThrownByCallActivityOnCallActivity() throws InterruptedException { String procId = runtimeService.startProcessInstanceByKey("catchErrorOnCallActivity2ndLevel").getId(); org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult(); assertEquals("Task in subprocess", task.getName()); taskService.complete(task.getId()); task = taskService.createTaskQuery().singleResult(); assertEquals("Escalated Task", task.getName()); // Completing the task will end the process instance 
taskService.complete(task.getId());
        assertProcessEnded(procId);
    }

    @Deployment
    public void testCatchErrorOnParallelMultiInstance() {
        String procId = runtimeService.startProcessInstanceByKey("catchErrorOnParallelMi").getId();
        List<org.flowable.task.api.Task> tasks = taskService.createTaskQuery().list();
        assertEquals(5, tasks.size());

        // Complete two subprocesses, just to make it a bit more complex
        Map<String, Object> vars = new HashMap<>();
        vars.put("throwError", false);
        taskService.complete(tasks.get(2).getId(), vars);
        taskService.complete(tasks.get(3).getId(), vars);

        // Reach the error event
        vars.put("throwError", true);
        taskService.complete(tasks.get(1).getId(), vars);

        assertEquals(0, taskService.createTaskQuery().count());
        assertProcessEnded(procId);
    }

    @Deployment
    public void testCatchErrorOnSequentialMultiInstance() {
        String procId = runtimeService.startProcessInstanceByKey("catchErrorOnSequentialMi").getId();

        // complete one task
        Map<String, Object> vars = new HashMap<>();
        vars.put("throwError", false);
        org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult();
        taskService.complete(task.getId(), vars);

        // complete second task and throw error
        vars.put("throwError", true);
        task = taskService.createTaskQuery().singleResult();
        taskService.complete(task.getId(), vars);

        assertProcessEnded(procId);
    }

    @Deployment
    public void testCatchErrorThrownByJavaDelegateOnServiceTask() {
        String procId = runtimeService.startProcessInstanceByKey("catchErrorThrownByJavaDelegateOnServiceTask").getId();
        assertThatErrorHasBeenCaught(procId);
    }

    @Deployment
    public void testCatchErrorThrownByJavaDelegateOnServiceTaskNotCancelActivity() {
        // NOTE(review): the process key spelling ("...Activiti") must match the id declared
        // in the deployed BPMN resource — do not "fix" the apparent typo here.
        String procId = runtimeService.startProcessInstanceByKey("catchErrorThrownByJavaDelegateOnServiceTaskNotCancelActiviti").getId();
        assertThatErrorHasBeenCaught(procId);
    }

    @Deployment
    public void testCatchErrorThrownByJavaDelegateOnServiceTaskWithErrorCode() {
        String procId = runtimeService.startProcessInstanceByKey("catchErrorThrownByJavaDelegateOnServiceTaskWithErrorCode").getId();
        assertThatErrorHasBeenCaught(procId);
    }

    @Deployment
    public void testCatchErrorThrownByJavaDelegateOnEmbeddedSubProcess() {
        String procId = runtimeService.startProcessInstanceByKey("catchErrorThrownByJavaDelegateOnEmbeddedSubProcess").getId();
        assertThatErrorHasBeenCaught(procId);
    }

    @Deployment
    public void testCatchErrorThrownByJavaDelegateOnEmbeddedSubProcessInduction() {
        String procId = runtimeService.startProcessInstanceByKey("catchErrorThrownByJavaDelegateOnEmbeddedSubProcessInduction").getId();
        assertThatErrorHasBeenCaught(procId);
    }

    @Deployment(resources = {
            "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.testCatchErrorThrownByJavaDelegateOnCallActivity-parent.bpmn20.xml",
            "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.testCatchErrorThrownByJavaDelegateOnCallActivity-child.bpmn20.xml" })
    public void testCatchErrorThrownByJavaDelegateOnCallActivity() {
        String procId = runtimeService.startProcessInstanceByKey("catchErrorThrownByJavaDelegateOnCallActivity-parent").getId();
        assertThatErrorHasBeenCaught(procId);
    }

    @Deployment(resources = {
            "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.testCatchErrorThrownByJavaDelegateOnCallActivity-child.bpmn20.xml" })
    public void testUncaughtErrorThrownByJavaDelegateOnServiceTask() {
        try {
            runtimeService.startProcessInstanceByKey("catchErrorThrownByJavaDelegateOnCallActivity-child");
            // FIX: previously the test passed silently when no error was thrown at all.
            fail("Expected a BpmnError since the error is not caught anywhere");
        } catch (BpmnError e) {
            assertTextPresent("No catching boundary event found for error with errorCode '23', neither in same process nor in parent process", e.getMessage());
        }
    }

    @Deployment(resources = {
            "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.testUncaughtErrorThrownByJavaDelegateOnCallActivity-parent.bpmn20.xml",
            "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.testCatchErrorThrownByJavaDelegateOnCallActivity-child.bpmn20.xml" })
    public void testUncaughtErrorThrownByJavaDelegateOnCallActivity() {
        try {
            runtimeService.startProcessInstanceByKey("uncaughtErrorThrownByJavaDelegateOnCallActivity-parent");
            // FIX: previously the test passed silently when no error was thrown at all.
            fail("Expected a BpmnError since the error is not caught anywhere");
        } catch (BpmnError e) {
            assertTextPresent("No catching boundary event found for error with errorCode '23', neither in same process nor in parent process", e.getMessage());
        }
    }

    @Deployment(resources = {
            "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.testCatchErrorOnGroovyScriptTask.bpmn20.xml" })
    public void testCatchErrorOnGroovyScriptTask() {
        String procId = runtimeService.startProcessInstanceByKey("catchErrorOnScriptTask").getId();
        assertProcessEnded(procId);
    }

    @Deployment(resources = {
            "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.testCatchErrorOnJavaScriptScriptTask.bpmn20.xml" })
    public void testCatchErrorOnJavaScriptScriptTask() {
        String procId = runtimeService.startProcessInstanceByKey("catchErrorOnScriptTask").getId();
        assertProcessEnded(procId);
    }

    @Deployment(resources = {
            "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.testUncaughtErrorOnScriptTaskWithEmptyErrorEventDefinition.bpmn20.xml" })
    public void testUncaughtErrorOnScriptTaskWithEmptyErrorEventDefinition() {
        String procId = runtimeService.startProcessInstanceByKey("uncaughtErrorOnScriptTaskWithEmptyErrorEventDefinition").getId();
        assertProcessEnded(procId);
    }

    @Deployment(resources = {
            "org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.testUncaughtErrorOnScriptTask.bpmn20.xml" })
    public void testUncaughtErrorOnScriptTask() {
        try {
            String procId = runtimeService.startProcessInstanceByKey("uncaughtErrorOnScriptTask").getId();
            fail("The script throws error event with errorCode 'errorUncaught', but no catching boundary event was defined. An exception is expected which did not occur");
            assertProcessEnded(procId);
        } catch (BpmnError e) {
            assertTextPresent("No catching boundary event found for error with errorCode 'errorUncaught', neither in same process nor in parent process", e.getMessage());
        }
    }

    @Deployment
    public void testCatchErrorThrownByJavaDelegateOnMultiInstanceServiceTaskSequential() {
        Map<String, Object> variables = new HashMap<>();
        variables.put("executionsBeforeError", 2);
        String procId = runtimeService.startProcessInstanceByKey("catchErrorThrownByJavaDelegateOnMultiInstanceServiceTaskSequential", variables).getId();
        assertThatErrorHasBeenCaught(procId);
    }

    @Deployment
    public void testCatchErrorThrownByJavaDelegateOnMultiInstanceServiceTaskParallel() {
        Map<String, Object> variables = new HashMap<>();
        variables.put("executionsBeforeError", 2);
        String procId = runtimeService.startProcessInstanceByKey("catchErrorThrownByJavaDelegateOnMultiInstanceServiceTaskParallel", variables).getId();
        assertThatErrorHasBeenCaught(procId);
    }

    @Deployment
    public void testErrorThrownByJavaDelegateNotCaughtByOtherEventType() {
        String procId = runtimeService.startProcessInstanceByKey("testErrorThrownByJavaDelegateNotCaughtByOtherEventType").getId();
        assertThatErrorHasBeenCaught(procId);
    }

    /**
     * Asserts that the thrown error was caught by the boundary event:
     * exactly one escalation task must exist, and completing it must end
     * the process instance.
     */
    private void assertThatErrorHasBeenCaught(String procId) {
        // The service task will throw an error event,
        // which is caught on the service task boundary
        assertEquals("No tasks found in task list.", 1, taskService.createTaskQuery().count());
        org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult();
        assertEquals("Escalated Task", task.getName());

        // Completing the task will end the process instance
        taskService.complete(task.getId());
        assertProcessEnded(procId);
    }

    @Deployment
    public void testConcurrentExecutionsInterruptedOnDestroyScope() {
        // this test makes sure that if the first concurrent execution destroys
        // the scope (due to the interrupting boundary catch), the second concurrent
        // execution does not move forward.

        // if the test fails, it produces a constraint violation in db.
        runtimeService.startProcessInstanceByKey("process");
    }

    @Deployment
    public void testCatchErrorThrownByExpressionOnServiceTask() {
        HashMap<String, Object> variables = new HashMap<>();
        variables.put("bpmnErrorBean", new BpmnErrorBean());
        String procId = runtimeService.startProcessInstanceByKey("testCatchErrorThrownByExpressionOnServiceTask", variables).getId();
        assertThatErrorHasBeenCaught(procId);
    }

    @Deployment
    public void testCatchErrorThrownByDelegateExpressionOnServiceTask() {
        HashMap<String, Object> variables = new HashMap<>();
        variables.put("bpmnErrorBean", new BpmnErrorBean());
        String procId = runtimeService.startProcessInstanceByKey("testCatchErrorThrownByDelegateExpressionOnServiceTask", variables).getId();
        assertThatErrorHasBeenCaught(procId);
    }

    @Deployment
    public void testCatchErrorThrownByJavaDelegateProvidedByDelegateExpressionOnServiceTask() {
        HashMap<String, Object> variables = new HashMap<>();
        variables.put("bpmnErrorBean", new BpmnErrorBean());
        String procId = runtimeService.startProcessInstanceByKey("testCatchErrorThrownByJavaDelegateProvidedByDelegateExpressionOnServiceTask", variables).getId();
        assertThatErrorHasBeenCaught(procId);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.main;

import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Predicate;

import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.ExtendedCamelContext;
import org.apache.camel.TypeConverters;
import org.apache.camel.cloud.ServiceRegistry;
import org.apache.camel.cluster.CamelClusterService;
import org.apache.camel.health.HealthCheckRegistry;
import org.apache.camel.health.HealthCheckRepository;
import org.apache.camel.health.HealthCheckService;
import org.apache.camel.model.Model;
import org.apache.camel.processor.interceptor.BacklogTracer;
import org.apache.camel.processor.interceptor.HandleFault;
import org.apache.camel.spi.AsyncProcessorAwaitManager;
import org.apache.camel.spi.ClassResolver;
import org.apache.camel.spi.Debugger;
import org.apache.camel.spi.EndpointStrategy;
import org.apache.camel.spi.EventFactory;
import org.apache.camel.spi.EventNotifier;
import org.apache.camel.spi.ExecutorServiceManager;
import org.apache.camel.spi.FactoryFinderResolver;
import org.apache.camel.spi.InflightRepository;
import org.apache.camel.spi.InterceptStrategy;
import org.apache.camel.spi.LifecycleStrategy;
import org.apache.camel.spi.LogListener;
import org.apache.camel.spi.ManagementObjectNameStrategy;
import org.apache.camel.spi.ManagementStrategy;
import org.apache.camel.spi.MessageHistoryFactory;
import org.apache.camel.spi.ModelJAXBContextFactory;
import org.apache.camel.spi.NodeIdFactory;
import org.apache.camel.spi.ProcessorFactory;
import org.apache.camel.spi.ReactiveExecutor;
import org.apache.camel.spi.Registry;
import org.apache.camel.spi.RouteController;
import org.apache.camel.spi.RoutePolicyFactory;
import org.apache.camel.spi.RuntimeEndpointRegistry;
import org.apache.camel.spi.ShutdownStrategy;
import org.apache.camel.spi.StreamCachingStrategy;
import org.apache.camel.spi.ThreadPoolFactory;
import org.apache.camel.spi.ThreadPoolProfile;
import org.apache.camel.spi.UnitOfWorkFactory;
import org.apache.camel.spi.UuidGenerator;
import org.apache.camel.support.jsse.GlobalSSLContextParametersSupplier;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * To configure the {@link DefaultConfigurationProperties} on {@link org.apache.camel.CamelContext}
 * used by Camel Main, Camel Spring Boot and other runtimes.
 */
public final class DefaultConfigurationConfigurer {

    public static final Logger LOG = LoggerFactory.getLogger(DefaultConfigurationConfigurer.class);

    // Utility class: static methods only, never instantiated.
    private DefaultConfigurationConfigurer() {
    }

    /**
     * Configures the {@link CamelContext} with the configuration.
     *
     * @param camelContext the camel context
     * @param config       the configuration
     */
    public static void configure(CamelContext camelContext, DefaultConfigurationProperties config) throws Exception {
        if (!config.isJmxEnabled()) {
            camelContext.disableJMX();
        }

        if (config.getName() != null) {
            camelContext.adapt(ExtendedCamelContext.class).setName(config.getName());
        }

        // shutdown behaviour: timeout of 0 or less keeps the strategy's own default
        if (config.getShutdownTimeout() > 0) {
            camelContext.getShutdownStrategy().setTimeout(config.getShutdownTimeout());
        }
        camelContext.getShutdownStrategy().setSuppressLoggingOnTimeout(config.isShutdownSuppressLoggingOnTimeout());
        camelContext.getShutdownStrategy().setShutdownNowOnTimeout(config.isShutdownNowOnTimeout());
        camelContext.getShutdownStrategy().setShutdownRoutesInReverseOrder(config.isShutdownRoutesInReverseOrder());
        camelContext.getShutdownStrategy().setLogInflightExchangesOnTimeout(config.isShutdownLogInflightExchangesOnTimeout());

        // 0 means "not configured" and keeps the default max chars for debug-logged bodies
        if (config.getLogDebugMaxChars() != 0) {
            camelContext.getGlobalOptions().put(Exchange.LOG_DEBUG_BODY_MAX_CHARS, "" + config.getLogDebugMaxChars());
        }

        // stream caching
        camelContext.setStreamCaching(config.isStreamCachingEnabled());
        camelContext.getStreamCachingStrategy().setAnySpoolRules(config.isStreamCachingAnySpoolRules());
        camelContext.getStreamCachingStrategy().setBufferSize(config.getStreamCachingBufferSize());
        camelContext.getStreamCachingStrategy().setRemoveSpoolDirectoryWhenStopping(config.isStreamCachingRemoveSpoolDirectoryWhenStopping());
        camelContext.getStreamCachingStrategy().setSpoolCipher(config.getStreamCachingSpoolCipher());
        if (config.getStreamCachingSpoolDirectory() != null) {
            camelContext.getStreamCachingStrategy().setSpoolDirectory(config.getStreamCachingSpoolDirectory());
        }
        if (config.getStreamCachingSpoolThreshold() != 0) {
            camelContext.getStreamCachingStrategy().setSpoolThreshold(config.getStreamCachingSpoolThreshold());
        }
        if (config.getStreamCachingSpoolUsedHeapMemoryLimit() != null) {
            // only the two enum names are accepted (case-insensitive); anything else is a user error
            StreamCachingStrategy.SpoolUsedHeapMemoryLimit limit;
            if ("Committed".equalsIgnoreCase(config.getStreamCachingSpoolUsedHeapMemoryLimit())) {
                limit = StreamCachingStrategy.SpoolUsedHeapMemoryLimit.Committed;
            } else if ("Max".equalsIgnoreCase(config.getStreamCachingSpoolUsedHeapMemoryLimit())) {
                limit = StreamCachingStrategy.SpoolUsedHeapMemoryLimit.Max;
            } else {
                throw new IllegalArgumentException("Invalid option " + config.getStreamCachingSpoolUsedHeapMemoryLimit() + " must either be Committed or Max");
            }
            camelContext.getStreamCachingStrategy().setSpoolUsedHeapMemoryLimit(limit);
        }
        if (config.getStreamCachingSpoolUsedHeapMemoryThreshold() != 0) {
            camelContext.getStreamCachingStrategy().setSpoolUsedHeapMemoryThreshold(config.getStreamCachingSpoolUsedHeapMemoryThreshold());
        }

        camelContext.setMessageHistory(config.isMessageHistory());
        camelContext.setLogMask(config.isLogMask());
        camelContext.setLogExhaustedMessageBody(config.isLogExhaustedMessageBody());
        camelContext.setHandleFault(config.isHandleFault());
        camelContext.setAutoStartup(config.isAutoStartup());
        camelContext.setAllowUseOriginalMessage(config.isAllowUseOriginalMessage());
        camelContext.setUseBreadcrumb(config.isUseBreadcrumb());
        camelContext.setUseDataType(config.isUseDataType());
        camelContext.setUseMDCLogging(config.isUseMdcLogging());
        camelContext.setLoadTypeConverters(config.isLoadTypeConverters());

        // JMX management agent is only present when JMX is enabled
        if (camelContext.getManagementStrategy().getManagementAgent() != null) {
            camelContext.getManagementStrategy().getManagementAgent().setEndpointRuntimeStatisticsEnabled(config.isEndpointRuntimeStatisticsEnabled());
            camelContext.getManagementStrategy().getManagementAgent().setStatisticsLevel(config.getJmxManagementStatisticsLevel());
            camelContext.getManagementStrategy().getManagementAgent().setManagementNamePattern(config.getJmxManagementNamePattern());
            camelContext.getManagementStrategy().getManagementAgent().setCreateConnector(config.isJmxCreateConnector());
        }

        camelContext.setTracing(config.isTracing());

        if (config.getThreadNamePattern() != null) {
            camelContext.getExecutorServiceManager().setThreadNamePattern(config.getThreadNamePattern());
        }

        if (config.getRouteFilterIncludePattern() != null || config.getRouteFilterExcludePattern() != null) {
            camelContext.getExtension(Model.class).setRouteFilterPattern(config.getRouteFilterIncludePattern(), config.getRouteFilterExcludePattern());
        }
    }

    /**
     * Performs additional configuration to lookup beans of Camel types to configure
     * additional configurations on the Camel context.
     * <p/>
     * Similar code in camel-core-xml module in class org.apache.camel.core.xml.AbstractCamelContextFactoryBean
     * or in camel-spring-boot module in class org.apache.camel.spring.boot.CamelAutoConfiguration.
     */
    public static void afterPropertiesSet(final CamelContext camelContext) throws Exception {
        final Registry registry = camelContext.getRegistry();
        final ManagementStrategy managementStrategy = camelContext.getManagementStrategy();
        final ExtendedCamelContext ecc = camelContext.adapt(ExtendedCamelContext.class);

        // each lookup below is best-effort: the bean is only applied when exactly one
        // candidate of that type exists in the registry (see getSingleBeanOfType)
        BacklogTracer bt = getSingleBeanOfType(registry, BacklogTracer.class);
        if (bt != null) {
            ecc.setExtension(BacklogTracer.class, bt);
        }
        HandleFault hf = getSingleBeanOfType(registry, HandleFault.class);
        if (hf != null) {
            ecc.addInterceptStrategy(hf);
        }
        InflightRepository ir = getSingleBeanOfType(registry, InflightRepository.class);
        if (ir != null) {
            ecc.setInflightRepository(ir);
        }
        AsyncProcessorAwaitManager apam = getSingleBeanOfType(registry, AsyncProcessorAwaitManager.class);
        if (apam != null) {
            ecc.setAsyncProcessorAwaitManager(apam);
        }
        ManagementStrategy ms = getSingleBeanOfType(registry, ManagementStrategy.class);
        if (ms != null) {
            ecc.setManagementStrategy(ms);
        }
        ManagementObjectNameStrategy mons = getSingleBeanOfType(registry, ManagementObjectNameStrategy.class);
        if (mons != null) {
            managementStrategy.setManagementObjectNameStrategy(mons);
        }
        EventFactory ef = getSingleBeanOfType(registry, EventFactory.class);
        if (ef != null) {
            managementStrategy.setEventFactory(ef);
        }
        UnitOfWorkFactory uowf = getSingleBeanOfType(registry, UnitOfWorkFactory.class);
        if (uowf != null) {
            ecc.setUnitOfWorkFactory(uowf);
        }
        RuntimeEndpointRegistry rer = getSingleBeanOfType(registry, RuntimeEndpointRegistry.class);
        if (rer != null) {
            ecc.setRuntimeEndpointRegistry(rer);
        }
        ModelJAXBContextFactory mjcf = getSingleBeanOfType(registry, ModelJAXBContextFactory.class);
        if (mjcf != null) {
            ecc.setModelJAXBContextFactory(mjcf);
        }
        ClassResolver cr = getSingleBeanOfType(registry, ClassResolver.class);
        if (cr != null) {
            ecc.setClassResolver(cr);
        }
        FactoryFinderResolver ffr = getSingleBeanOfType(registry, FactoryFinderResolver.class);
        if (ffr != null) {
            ecc.setFactoryFinderResolver(ffr);
        }
        RouteController rc = getSingleBeanOfType(registry, RouteController.class);
        if (rc != null) {
            ecc.setRouteController(rc);
        }
        UuidGenerator ug = getSingleBeanOfType(registry, UuidGenerator.class);
        if (ug != null) {
            ecc.setUuidGenerator(ug);
        }
        ExecutorServiceManager esm = getSingleBeanOfType(registry, ExecutorServiceManager.class);
        if (esm != null) {
            ecc.setExecutorServiceManager(esm);
        }
        ThreadPoolFactory tpf = getSingleBeanOfType(registry, ThreadPoolFactory.class);
        if (tpf != null) {
            ecc.getExecutorServiceManager().setThreadPoolFactory(tpf);
        }
        ProcessorFactory pf = getSingleBeanOfType(registry, ProcessorFactory.class);
        if (pf != null) {
            ecc.setProcessorFactory(pf);
        }
        Debugger debugger = getSingleBeanOfType(registry, Debugger.class);
        if (debugger != null) {
            ecc.setDebugger(debugger);
        }
        NodeIdFactory nif = getSingleBeanOfType(registry, NodeIdFactory.class);
        if (nif != null) {
            ecc.setNodeIdFactory(nif);
        }
        MessageHistoryFactory mhf = getSingleBeanOfType(registry, MessageHistoryFactory.class);
        if (mhf != null) {
            ecc.setMessageHistoryFactory(mhf);
        }
        ReactiveExecutor re = getSingleBeanOfType(registry, ReactiveExecutor.class);
        if (re != null) {
            ecc.setReactiveExecutor(re);
        }
        ShutdownStrategy ss = getSingleBeanOfType(registry, ShutdownStrategy.class);
        if (ss != null) {
            ecc.setShutdownStrategy(ss);
        }

        // multi-valued bean types: apply every candidate found in the registry
        Set<TypeConverters> tcs = registry.findByType(TypeConverters.class);
        if (!tcs.isEmpty()) {
            tcs.forEach(t -> camelContext.getTypeConverterRegistry().addTypeConverters(t));
        }
        Set<EndpointStrategy> ess = registry.findByType(EndpointStrategy.class);
        if (!ess.isEmpty()) {
            ess.forEach(ecc::registerEndpointCallback);
        }
        Set<CamelClusterService> csss = registry.findByType(CamelClusterService.class);
        if (!csss.isEmpty()) {
            for (CamelClusterService css : csss) {
                camelContext.addService(css);
            }
        }
        Set<RoutePolicyFactory> rpfs = registry.findByType(RoutePolicyFactory.class);
        if (!rpfs.isEmpty()) {
            rpfs.forEach(camelContext::addRoutePolicyFactory);
        }

        // only register beans that are not already known to the context (negated contains-check)
        final Predicate<EventNotifier> containsEventNotifier = managementStrategy.getEventNotifiers()::contains;
        registerPropertiesForBeanTypesWithCondition(registry, EventNotifier.class, containsEventNotifier.negate(), managementStrategy::addEventNotifier);
        final Predicate<InterceptStrategy> containsInterceptStrategy = camelContext.adapt(ExtendedCamelContext.class).getInterceptStrategies()::contains;
        registerPropertiesForBeanTypesWithCondition(registry, InterceptStrategy.class, containsInterceptStrategy.negate(), camelContext.adapt(ExtendedCamelContext.class)::addInterceptStrategy);
        final Predicate<LifecycleStrategy> containsLifecycleStrategy = camelContext.getLifecycleStrategies()::contains;
        registerPropertiesForBeanTypesWithCondition(registry, LifecycleStrategy.class, containsLifecycleStrategy.negate(), camelContext::addLifecycleStrategy);
        final Predicate<LogListener> containsLogListener = camelContext.adapt(ExtendedCamelContext.class).getLogListeners()::contains;
        registerPropertiesForBeanTypesWithCondition(registry, LogListener.class, containsLogListener.negate(), camelContext.adapt(ExtendedCamelContext.class)::addLogListener);

        // service registry
        Map<String, ServiceRegistry> serviceRegistries = registry.findByTypeWithName(ServiceRegistry.class);
        if (serviceRegistries != null && !serviceRegistries.isEmpty()) {
            for (Map.Entry<String, ServiceRegistry> entry : serviceRegistries.entrySet()) {
                ServiceRegistry service = entry.getValue();

                // assign a generated id when the bean does not carry one
                if (service.getId() == null) {
                    service.setId(camelContext.getUuidGenerator().generateUuid());
                }

                LOG.info("Using ServiceRegistry with id: {} and implementation: {}", service.getId(), service);
                camelContext.addService(service);
            }
        }

        // SSL context parameters
        GlobalSSLContextParametersSupplier sslContextParametersSupplier = getSingleBeanOfType(registry, GlobalSSLContextParametersSupplier.class);
        if (sslContextParametersSupplier != null) {
            camelContext.setSSLContextParameters(sslContextParametersSupplier.get());
        }

        // health check
        HealthCheckRegistry healthCheckRegistry = getSingleBeanOfType(registry, HealthCheckRegistry.class);
        if (healthCheckRegistry != null) {
            healthCheckRegistry.setCamelContext(camelContext);
            LOG.info("Using HealthCheckRegistry: {}", healthCheckRegistry);
            camelContext.setExtension(HealthCheckRegistry.class, healthCheckRegistry);
        } else {
            // fall back to the registry already associated with the context
            healthCheckRegistry = HealthCheckRegistry.get(camelContext);
            healthCheckRegistry.setCamelContext(camelContext);
        }

        Set<HealthCheckRepository> hcrs = registry.findByType(HealthCheckRepository.class);
        if (!hcrs.isEmpty()) {
            hcrs.forEach(healthCheckRegistry::addRepository);
        }

        HealthCheckService hcs = getSingleBeanOfType(registry, HealthCheckService.class);
        if (hcs != null) {
            camelContext.addService(hcs);
        }

        // set the default thread pool profile if defined
        initThreadPoolProfiles(registry, camelContext);
    }

    /**
     * Looks up a single bean of the given type in the registry and, when found,
     * passes it to the setter. Does nothing when zero or multiple candidates exist.
     */
    private static <T> void registerPropertyForBeanType(final Registry registry, final Class<T> beanType, final Consumer<T> propertySetter) {
        T propertyBean = getSingleBeanOfType(registry, beanType);
        if (propertyBean == null) {
            return;
        }

        LOG.info("Using custom {}: {}", beanType.getSimpleName(), propertyBean);
        propertySetter.accept(propertyBean);
    }

    /**
     * Returns the sole bean of the given type from the registry, or {@code null}
     * when there is not exactly one candidate (ambiguity is treated as absence).
     */
    private static <T> T getSingleBeanOfType(Registry registry, Class<T> type) {
        Map<String, T> beans = registry.findByTypeWithName(type);
        if (beans.size() == 1) {
            return beans.values().iterator().next();
        } else {
            return null;
        }
    }

    /**
     * Registers every bean of the given type found in the registry (no filtering).
     */
    private static <T> void registerPropertiesForBeanTypes(final Registry registry, final Class<T> beanType, final Consumer<T> propertySetter) {
        registerPropertiesForBeanTypesWithCondition(registry, beanType, b -> true, propertySetter);
    }

    /**
     * Registers every bean of the given type that satisfies the condition,
     * logging each addition by bean name and implementation.
     */
    private static <T> void registerPropertiesForBeanTypesWithCondition(final Registry registry, final Class<T> beanType, final Predicate<T> condition, final Consumer<T> propertySetter) {
        final Map<String, T> beans = registry.findByTypeWithName(beanType);
        if (!ObjectHelper.isNotEmpty(beans)) {
            return;
        }

        final String simpleName = beanType.getSimpleName();
        beans.forEach((name, bean) -> {
            if (condition.test(bean)) {
                LOG.info("Adding custom {} with id: {} and implementation: {}", simpleName, name, bean);
                propertySetter.accept(bean);
            }
        });
    }

    /**
     * Returns a consumer that adds the accepted bean as a service on the context,
     * wrapping any checked exception in a {@link RuntimeException}.
     */
    private static <T> Consumer<T> addServiceToContext(final CamelContext camelContext) {
        return service -> {
            try {
                camelContext.addService(service);
            } catch (Exception e) {
                throw new RuntimeException("Unable to add service to Camel context", e);
            }
        };
    }

    /**
     * Applies {@link ThreadPoolProfile} beans from the registry: non-default profiles
     * are registered, the (at most one) default profile replaces the context default.
     *
     * @throws IllegalArgumentException when more than one default profile is found
     */
    private static void initThreadPoolProfiles(Registry registry, CamelContext camelContext) {
        Set<String> defaultIds = new HashSet<>();

        // lookup and use custom profiles from the registry
        Map<String, ThreadPoolProfile> profiles = registry.findByTypeWithName(ThreadPoolProfile.class);
        if (profiles != null && !profiles.isEmpty()) {
            for (Map.Entry<String, ThreadPoolProfile> entry : profiles.entrySet()) {
                ThreadPoolProfile profile = entry.getValue();
                // do not add if already added, for instance a tracer that is also an InterceptStrategy class
                if (profile.isDefaultProfile()) {
                    LOG.info("Using custom default ThreadPoolProfile with id: {} and implementation: {}", entry.getKey(), profile);
                    camelContext.getExecutorServiceManager().setDefaultThreadPoolProfile(profile);
                    defaultIds.add(entry.getKey());
                } else {
                    camelContext.getExecutorServiceManager().registerThreadPoolProfile(profile);
                }
            }
        }

        // validate at most one is defined
        if (defaultIds.size() > 1) {
            throw new IllegalArgumentException("Only exactly one default ThreadPoolProfile is allowed, was " + defaultIds.size() + " ids: " + defaultIds);
        }
    }

}
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.android.glass.sample.timer;

import android.app.Activity;
import android.content.ComponentName;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Bundle;
import android.os.IBinder;
import android.os.Handler;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;

import java.lang.Runnable;

/**
 * Activity showing the options menu.
 */
public class MenuActivity extends Activity {

    /** Request code for setting the timer, visible for testing. */
    static final int SET_TIMER = 100;

    private final Handler mHandler = new Handler();

    // Timer obtained from the bound TimerService; null until the connection delivers it.
    private Timer mTimer;

    // Gate flags: the options menu is only opened once the window is attached
    // AND the timer is available, and never twice concurrently.
    private boolean mAttachedToWindow;
    private boolean mOptionsMenuOpen;
    // Set while SetTimerActivity is in flight so onOptionsMenuClosed does not finish() early.
    private boolean mSettingTimer;

    private ServiceConnection mConnection = new ServiceConnection() {

        @Override
        public void onServiceConnected(ComponentName name, IBinder service) {
            if (service instanceof TimerService.TimerBinder) {
                mTimer = ((TimerService.TimerBinder) service).getTimer();
                openOptionsMenu();
            }
            // No need to keep the service bound.
            unbindService(this);
        }

        @Override
        public void onServiceDisconnected(ComponentName name) {
            // Nothing to do here.
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Flags 0 (no BIND_AUTO_CREATE): binds only if the service is already
        // running — presumably started elsewhere to publish the LiveCard; verify against caller.
        bindService(new Intent(this, TimerService.class), mConnection, 0);
    }

    @Override
    public void onAttachedToWindow() {
        super.onAttachedToWindow();
        mAttachedToWindow = true;
        openOptionsMenu();
    }

    @Override
    public void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        mAttachedToWindow = false;
    }

    @Override
    public void openOptionsMenu() {
        // Only open once all preconditions are met; called from both the service
        // connection callback and onAttachedToWindow, whichever happens last wins.
        if (!mOptionsMenuOpen && mAttachedToWindow && mTimer != null) {
            mOptionsMenuOpen = true;
            super.openOptionsMenu();
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.timer, menu);
        return true;
    }

    @Override
    public boolean onPrepareOptionsMenu(Menu menu) {
        // A duration of 0 means no timer has been set yet; show the matching menu group.
        final boolean timeSet = mTimer.getDurationMillis() != 0;

        setOptionsMenuGroupState(menu, R.id.no_time_set, !timeSet);
        setOptionsMenuGroupState(menu, R.id.time_set, timeSet);

        if (timeSet) {
            // start: timer set but never started; resume: started and currently paused;
            // pause: running with time remaining; reset: any started timer.
            setOptionsMenuState(
                    menu.findItem(R.id.start), !mTimer.isRunning() && !mTimer.isStarted());
            setOptionsMenuState(
                    menu.findItem(R.id.resume), !mTimer.isRunning() && mTimer.isStarted());
            setOptionsMenuState(
                    menu.findItem(R.id.pause),
                    mTimer.isRunning() && mTimer.getRemainingTimeMillis() > 0);
            setOptionsMenuState(menu.findItem(R.id.reset), mTimer.isStarted());
        }
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle item selection.
        switch (item.getItemId()) {
            case R.id.start:
            case R.id.resume:
                mTimer.start();
                return true;
            case R.id.pause:
                mTimer.pause();
                return true;
            case R.id.reset:
                mTimer.reset();
                return true;
            case R.id.change_timer:
            case R.id.set_timer:
                // Start the new Activity at the end of the message queue for proper options menu
                // animation. This is only needed when starting a new Activity or stopping a Service
                // that published a LiveCard.
                post(new Runnable() {

                    @Override
                    public void run() {
                        Intent setTimerIntent = new Intent(MenuActivity.this, SetTimerActivity.class);
                        setTimerIntent.putExtra(
                                SetTimerActivity.EXTRA_DURATION_MILLIS, mTimer.getDurationMillis());
                        startActivityForResult(setTimerIntent, SET_TIMER);
                    }
                });
                mTimer.reset();
                mSettingTimer = true;
                return true;
            case R.id.stop:
                // Stop the service at the end of the message queue for proper options menu
                // animation. This is only needed when starting a new Activity or stopping a Service
                // that published a LiveCard.
                post(new Runnable() {

                    @Override
                    public void run() {
                        stopService(new Intent(MenuActivity.this, TimerService.class));
                    }
                });
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    @Override
    public void onOptionsMenuClosed(Menu menu) {
        mOptionsMenuOpen = false;
        if (!mSettingTimer) {
            // Nothing else to do, closing the Activity.
            finish();
        }
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Apply the duration picked in SetTimerActivity, then close either way.
        if (resultCode == RESULT_OK && requestCode == SET_TIMER) {
            mTimer.setDurationMillis(data.getLongExtra(SetTimerActivity.EXTRA_DURATION_MILLIS, 0));
        }
        finish();
    }

    /**
     * Posts a {@link Runnable} at the end of the message loop, overridable for testing.
     */
    protected void post(Runnable runnable) {
        mHandler.post(runnable);
    }

    /**
     * Sets a {@code MenuItem} visible and enabled state.
     */
    private static void setOptionsMenuState(MenuItem menuItem, boolean enabled) {
        menuItem.setVisible(enabled);
        menuItem.setEnabled(enabled);
    }

    /**
     * Sets all menu items visible and enabled state that are in the given group.
     */
    private static void setOptionsMenuGroupState(Menu menu, int groupId, boolean enabled) {
        menu.setGroupVisible(groupId, enabled);
        menu.setGroupEnabled(groupId, enabled);
    }
}
// Copyright (c) 2014 Intel Corporation. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.xwalk.embedding.base;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.FutureTask;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.Timer;
import java.util.TimerTask;

import junit.framework.Assert;

import org.chromium.content.browser.test.util.CallbackHelper;
import org.chromium.content.browser.test.util.Criteria;
import org.chromium.content.browser.test.util.CriteriaHelper;
import org.chromium.net.test.util.TestWebServer;
import org.chromium.ui.gfx.DeviceDisplayInfo;
import org.xwalk.core.JavascriptInterface;
import org.xwalk.core.XWalkCookieManager;
import org.xwalk.core.XWalkDownloadListener;
import org.xwalk.core.XWalkNavigationHistory;
import org.xwalk.core.XWalkNavigationItem;
import org.xwalk.core.XWalkView;
import org.xwalk.embedding.MainActivity;

import com.test.server.ActivityInstrumentationTestCase2;

import android.content.Context;
import android.content.res.AssetManager;
import android.os.Bundle;
import android.test.MoreAsserts;
import android.util.Log;
import android.util.Pair;
import android.webkit.WebResourceResponse;

/**
 * Base class for XWalkView instrumentation tests. Hosts the embedded web view
 * plus a local {@link TestWebServer}, and provides synchronous wrappers around
 * the asynchronous XWalkView API: every *OnUiThread helper marshals the call
 * onto the UI thread via {@code runOnMainSync}, and every *Sync helper blocks
 * on a {@link CallbackHelper} until the matching page event fires.
 */
public class XWalkViewTestBase extends ActivityInstrumentationTestCase2<MainActivity> {

    public XWalkViewTestBase(Class<MainActivity> activityClass) {
        super(activityClass);
    }

    // --- Fixture constants shared by the concrete test classes ---
    protected final static String PASS_STRING = "Pass";
    protected static final String EMPTY_PAGE = "<!doctype html>" +
            "<title>Set User Agent String Test</title><p>Set User Agent String Test.</p>";
    protected static final String USER_AGENT =
            "Set User Agent String Test Mozilla/5.0 Apple Webkit Cosswalk Mobile Safari";
    // JSON-encoded form of USER_AGENT as returned by executeJavaScriptAndWaitForResult.
    protected static final String EXPECTED_USER_AGENT =
            "\"Set User Agent String Test Mozilla/5.0 Apple Webkit Cosswalk Mobile Safari\"";
    protected static final int NUM_OF_CONSOLE_CALL = 10;
    protected static final String REDIRECT_TARGET_PATH = "/redirect_target.html";
    protected static final String TITLE = "TITLE";
    protected final String mExpectedStr = "xwalk";
    protected static final String DATA_URL = "data:text/html,<div/>";
    // Timeouts: seconds for CallbackHelper waits, ms for CriteriaHelper polling.
    protected final static int WAIT_TIMEOUT_SECONDS = 15;
    protected final static long WAIT_TIMEOUT_MS = 2000;
    private final static int CHECK_INTERVAL = 100;

    // Timer backing waitForTimerFinish(); shared across the whole fixture.
    private Timer mTimer = new Timer();
    protected XWalkView mXWalkView;
    // Second view used as the target of saveState/restoreState round-trips.
    protected XWalkView mRestoreXWalkView;
    protected MainActivity mainActivity;
    protected TestWebServer mWebServer;
    protected XWalkCookieManager mCookieManager;
    protected final TestHelperBridge mTestHelperBridge = new TestHelperBridge();

    // URLs registered by setServerResponseAndLoad, consumed by checkHistoryItemList.
    private String mUrls[]=new String[3];
    protected static final int NUM_NAVIGATIONS = 3;
    public static final String TITLES[] = {
            "page 1 title foo",
            "page 2 title bar",
            "page 3 title baz"
    };
    private static final String PATHS[] = {
            "/p1foo.html",
            "/p2bar.html",
            "/p3baz.html",
    };

    // Strings exchanged with the JS dialog tests (alert/prompt/confirm).
    protected final String ALERT_TEXT = "Hello World!";
    protected final String PROMPT_TEXT = "How do you like your eggs in the morning?";
    protected final String PROMPT_DEFAULT = "Scrambled";
    protected final String PROMPT_RESULT = "I like mine with a kiss";
    final String CONFIRM_TEXT = "Would you like a cookie?";
    protected final AtomicBoolean callbackCalled = new AtomicBoolean(false);
    final CallbackHelper jsBeforeUnloadHelper = new CallbackHelper();
    boolean flagForConfirmCancelled = false;

    public XWalkViewTestBase() {
        super(MainActivity.class);
    }

    /**
     * Starts the web server, spins until the Crosswalk runtime reports ready,
     * then wires the test UI/resource clients into the activity's XWalkView.
     */
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mainActivity = (MainActivity) getActivity();
        mWebServer = TestWebServer.start();
        // Busy-wait (200ms steps) until the runtime finishes initializing.
        // NOTE(review): exceptions from the wait are deliberately swallowed so
        // the readiness poll keeps going; there is no overall timeout here.
        while(!mainActivity.isXWalkReady()) {
            try{
                waitForTimerFinish(200);
            }catch(Exception e){}
        }
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mRestoreXWalkView = new XWalkView(getActivity(), getActivity());
                mXWalkView = mainActivity.getXWalkView();
                mXWalkView.setUIClient(new TestXWalkUIClient());
                mXWalkView.setResourceClient(new TestXWalkResourceClient());
            }
        });
    }

    /**
     * Blocks the calling thread for approximately {@code timer} milliseconds
     * using a one-shot TimerTask that notifies a shared monitor.
     */
    public void waitForTimerFinish(int timer) throws Exception {
        Object notify = new Object();
        synchronized (notify) {
            NotifyTask testTask = new NotifyTask(notify);
            mTimer.schedule(testTask, timer);
            notify.wait();
        }
    }

    /** TimerTask that wakes whoever is wait()ing on the supplied monitor. */
    public class NotifyTask extends TimerTask {
        private Object mObj;

        public NotifyTask(Object obj) {
            super();
            mObj = obj;
        }

        @Override
        public void run() {
            synchronized (mObj) {
                mObj.notify();
            }
        }
    }

    /** Kicks off a load of {@code url} on the UI thread without waiting. */
    protected void loadUrlAsync(final String url) throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.load(url, null);
            }
        });
    }

    /** Kicks off a load of inline {@code content} (keyed by {@code url}) without waiting. */
    protected void loadUrlAsync(final String url,final String content) throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.load(url, content);
            }
        });
    }

    /**
     * Loads raw data into the view asynchronously.
     * NOTE(review): {@code mimeType} and {@code isBase64Encoded} are accepted
     * but ignored — the body only forwards (url, data) to XWalkView.load.
     */
    protected void loadDataAsync(final String url, final String data, final String mimeType,
            final boolean isBase64Encoded) throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.load(url, data);
            }
        });
    }

    /** Reads the current document title on the UI thread. */
    protected String getTitleOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkView.getTitle();
            }
        });
    }

    /**
     * Runs {@code callable} on the UI thread and blocks for its result.
     * waitForIdleSync first drains the UI message queue so pending view work
     * settles before the callable observes state.
     */
    protected <R> R runTestOnUiThreadAndGetResult(Callable<R> callable) throws Exception {
        FutureTask<R> task = new FutureTask<R>(callable);
        getInstrumentation().waitForIdleSync();
        getInstrumentation().runOnMainSync(task);
        return task.get();
    }

    /**
     * Reads an asset from the *instrumentation* package into a String.
     * NOTE(review): relies on a single read() of available() bytes and the
     * platform default charset — fine for small test assets, not general I/O.
     */
    protected String getFileContent(String fileName) {
        try {
            Context context = getInstrumentation().getContext();
            InputStream inputStream = context.getAssets().open(fileName);
            int size = inputStream.available();
            byte buffer[] = new byte[size];
            inputStream.read(buffer);
            inputStream.close();
            String fileContent = new String(buffer);
            return fileContent;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    protected XWalkView getXWalkView() {
        return mXWalkView;
    }

    protected boolean canGoBackOnUiThread() throws Throwable {
        return runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
            @Override
            public Boolean call() {
                return mXWalkView.getNavigationHistory().canGoBack();
            }
        });
    }

    protected boolean hasEnteredFullScreenOnUiThread() throws Throwable {
        return runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
            @Override
            public Boolean call() {
                return mXWalkView.hasEnteredFullscreen();
            }
        });
    }

    protected void leaveFullscreenOnUiThread() throws Throwable {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.leaveFullscreen();
            }
        });
    }

    protected boolean canGoForwardOnUiThread() throws Throwable {
        return runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
            @Override
            public Boolean call() {
                return mXWalkView.getNavigationHistory().canGoForward();
            }
        });
    }

    protected XWalkNavigationItem getCurrentItemOnUiThread() throws Throwable {
        return runTestOnUiThreadAndGetResult(new Callable<XWalkNavigationItem>() {
            @Override
            public XWalkNavigationItem call() {
                return mXWalkView.getNavigationHistory().getCurrentItem();
            }
        });
    }

    /**
     * Evaluates {@code code} in the page and blocks until a result arrives.
     *
     * @return the JSON-encoded evaluation result (e.g. strings come back quoted).
     */
    protected String executeJavaScriptAndWaitForResult(final String code) throws Exception {
        final OnEvaluateJavaScriptResultHelper helper =
                mTestHelperBridge.getOnEvaluateJavaScriptResultHelper();
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                helper.evaluateJavascript(mXWalkView, code);
            }
        });
        helper.waitUntilHasValue();
        Assert.assertTrue("Failed to retrieve JavaScript evaluation results.",
                helper.hasValue());
        return helper.getJsonResultAndClear();
    }

    protected String getUrlOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkView.getUrl();
            }
        });
    }

    /** Returns the DevTools socket path, or "" when remote debugging is off. */
    protected String getRemoteDebuggingUrlOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                if(mXWalkView.getRemoteDebuggingUrl() == null) {
                    return "";
                }
                return mXWalkView.getRemoteDebuggingUrl().getPath();
            }
        });
    }

    protected String getCurrentItemUrlOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkView.getNavigationHistory().getCurrentItem().getUrl();
            }
        });
    }

    // NOTE(review): identical to getCurrentItemUrlOnUiThread(); kept (both are
    // part of the fixture's public surface used by subclasses).
    protected String getNavigationUrlOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkView.getNavigationHistory().getCurrentItem().getUrl();
            }
        });
    }

    protected String getNavigationOriginalUrlOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkView.getNavigationHistory().getCurrentItem().getOriginalUrl();
            }
        });
    }

    protected String getNavigationTitleOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkView.getNavigationHistory().getCurrentItem().getTitle();
            }
        });
    }

    /** Navigation-history length, stringified for easy assertion. */
    protected String getSizeOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return String.valueOf(mXWalkView.getNavigationHistory().size());
            }
        });
    }

    /** Whether a history entry exists at fixed index 1, stringified. */
    protected String hasItemAtOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return String.valueOf(mXWalkView.getNavigationHistory().hasItemAt(1));
            }
        });
    }

    protected String getOriginalUrlOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkView.getOriginalUrl();
            }
        });
    }

    protected void clearCacheOnUiThread(final boolean includeDiskFiles) throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.clearCache(includeDiskFiles);
            }
        });
    }

    protected String getAPIVersionOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkView.getAPIVersion();
            }
        });
    }

    protected String getXWalkVersionOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkView.getXWalkVersion();
            }
        });
    }

    /**
     * Reads an asset from an explicit AssetManager (used for manifest loads,
     * where the asset lives in the *application* package, not the test apk).
     */
    private String getAssetsFileContent(AssetManager assetManager, String fileName)
            throws IOException {
        String result = "";
        InputStream inputStream = null;
        try {
            inputStream = assetManager.open(fileName);
            int size = inputStream.available();
            byte[] buffer = new byte[size];
            inputStream.read(buffer);
            result = new String(buffer);
        } finally {
            if (inputStream != null) {
                inputStream.close();
            }
        }
        return result;
    }

    /** Runnable shell carrying a URL buffer; run() is intentionally empty. */
    public class PerformExecute implements Runnable {
        protected StringBuffer urlBuf;

        public PerformExecute(StringBuffer url) {
            urlBuf = url;
        }

        @Override
        public void run() {
        }
    }

    /** InputStream that is permanently at EOF; used as a stub response body. */
    public static class EmptyInputStream extends InputStream {
        @Override
        public int available() {
            return 0;
        }

        @Override
        public int read() throws IOException {
            return -1;
        }

        @Override
        public int read(byte b[]) throws IOException {
            return -1;
        }

        @Override
        public int read(byte b[], int off, int len) throws IOException {
            return -1;
        }

        @Override
        public long skip(long n) throws IOException {
            if (n < 0) throw new IOException("skipping negative number of bytes");
            return 0;
        }
    }

    /** Goes back {@code n} history entries and waits for the resulting page load. */
    protected void goBackSync(final int n) throws Throwable {
        runTestWaitPageFinished(new Runnable(){
            @Override
            public void run() {
                getInstrumentation().runOnMainSync(new Runnable() {
                    @Override
                    public void run() {
                        mXWalkView.getNavigationHistory().navigate(
                                XWalkNavigationHistory.Direction.BACKWARD, n);
                    }
                });
            }
        });
    }

    /** Goes forward {@code n} history entries and waits for the resulting page load. */
    protected void goForwardSync(final int n) throws Throwable {
        runTestWaitPageFinished(new Runnable(){
            @Override
            public void run() {
                getInstrumentation().runOnMainSync(new Runnable() {
                    @Override
                    public void run() {
                        mXWalkView.getNavigationHistory().navigate(
                                XWalkNavigationHistory.Direction.FORWARD, n);
                    }
                });
            }
        });
    }

    /**
     * Registers the first {@code upto} fixture pages (PATHS/TITLES) with the
     * web server, loads each synchronously, and records its URL into mUrls.
     */
    protected void setServerResponseAndLoad(int upto) throws Throwable {
        for (int i = 0; i < upto; ++i) {
            String html = "<html><head><title>" + TITLES[i] + "</title></head></html>";
            mUrls[i] = mWebServer.setResponse(PATHS[i], html, null);
            loadUrlSync(mUrls[i]);
        }
    }

    /** Round-trips view state from mXWalkView into mRestoreXWalkView via a Bundle. */
    protected void saveAndRestoreStateOnUiThread() throws Throwable {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                Bundle bundle = new Bundle();
                mXWalkView.saveState(bundle);
                mRestoreXWalkView.restoreState(bundle);
            }
        });
    }

    /** Polls {@code callable} on the UI thread until true or the default poll timeout. */
    protected boolean pollOnUiThread(final Callable<Boolean> callable) throws Exception {
        return CriteriaHelper.pollForCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                try {
                    return runTestOnUiThreadAndGetResult(callable);
                } catch (Throwable e) {
                    return false;
                }
            }
        });
    }

    /** Asserts the given view's history matches the pages loaded by setServerResponseAndLoad. */
    protected void checkHistoryItemList(XWalkView restoreXWalkView) throws Throwable {
        XWalkNavigationHistory history = getNavigationHistoryOnUiThread(restoreXWalkView);
        assertEquals(NUM_NAVIGATIONS, history.size());
        assertEquals(NUM_NAVIGATIONS - 1, history.getCurrentIndex());
        for (int i = 0; i < NUM_NAVIGATIONS; ++i) {
            assertEquals(mUrls[i], history.getItemAt(i).getUrl());
            assertEquals(TITLES[i], history.getItemAt(i).getTitle());
        }
    }

    private XWalkNavigationHistory getNavigationHistoryOnUiThread(
            final XWalkView content) throws Throwable{
        return runTestOnUiThreadAndGetResult(new Callable<XWalkNavigationHistory>() {
            @Override
            public XWalkNavigationHistory call() throws Exception {
                return content.getNavigationHistory();
            }
        });
    }

    /** Shuts down the web server and finishes the activity before super teardown. */
    @Override
    protected void tearDown() throws Exception {
        if (mWebServer != null) {
            mWebServer.shutdown();
        }
        if(mainActivity != null) {
            mainActivity.finish();
        }
        super.tearDown();
    }

    /** Object injected into the page for addJavascriptInterface tests. */
    public class TestJavascriptInterface {
        @JavascriptInterface
        public String getTextWithoutAnnotation() {
            return mExpectedStr;
        }

        @JavascriptInterface
        public String getText() {
            return mExpectedStr;
        }

        @JavascriptInterface
        public String getDateText() {
            return new Date().toString();
        }
    }

    /** Exposes TestJavascriptInterface to page JS as window.testInterface. */
    protected void addJavascriptInterface() {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                getXWalkView().addJavascriptInterface(new TestJavascriptInterface(),
                        "testInterface");
            }
        });
    }

    /**
     * Evaluates {@code script}; on success the document title becomes its
     * value, on any JS exception the title becomes "xwalk".
     */
    protected void raisesExceptionAndSetTitle(String script) throws Throwable {
        executeJavaScriptAndWaitForResult("try { var title = " + script + ";" +
                " document.title = title;" +
                "} catch (exception) {" +
                " document.title = \"xwalk\";" +
                "}");
    }

    /** UI client that reports events into mTestHelperBridge. */
    public class TestXWalkUIClient extends TestXWalkUIClientBase {
        public TestXWalkUIClient() {
            super(mTestHelperBridge, mXWalkView, callbackCalled);
        }
    }

    /** Resource client that reports events into mTestHelperBridge. */
    class TestXWalkResourceClient extends TestXWalkResourceClientBase {
        public TestXWalkResourceClient() {
            super(mTestHelperBridge,mXWalkView);
        }
    }

    /** Loads {@code url} and blocks until onPageFinished fires once. */
    protected void loadUrlSync(final String url) throws Exception {
        CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        loadUrlAsync(url);
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }

    /** Loads inline {@code content} and blocks until onPageFinished fires once. */
    protected void loadUrlSync(final String url, final String content) throws Exception {
        CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        loadUrlAsync(url, content);
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }

    /**
     * Loads {@code url} then {@code code}.
     * NOTE(review): issues two async loads but waits for only one
     * onPageFinished; the second load's completion is not awaited here.
     */
    protected void loadJavaScriptSync(final String url, final String code) throws Exception {
        CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        loadUrlAsync(url);
        loadUrlAsync(code);
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }

    /** Loads a packaged app by manifest and waits for onPageFinished. */
    protected void loadFromManifestSync(final String path, final String name) throws Exception {
        CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        loadFromManifestAsync(path, name);
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }

    /** Reads the manifest asset on the UI thread and starts the app load. */
    protected void loadFromManifestAsync(final String path, final String name) throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                String manifestContent = "";
                try {
                    manifestContent = getAssetsFileContent(mainActivity.getAssets(), name);
                } catch (IOException e) {
                    e.printStackTrace();
                }
                mXWalkView.loadAppFromManifest(path, manifestContent);
            }
        });
    }

    /** Loads an instrumentation asset's content as a page and waits for finish. */
    protected void loadAssetFile(String fileName) throws Exception {
        CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        String fileContent = getFileContent(fileName);
        loadDataAsync(fileName, fileContent, "text/html", false);
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }

    /** Runs {@code runnable} and blocks until one onPageFinished it triggers. */
    protected void runTestWaitPageFinished(Runnable runnable) throws Exception{
        CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        runnable.run();
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }

    /** Reloads with the given mode and waits for the page to finish. */
    protected void reloadSync(final int mode) throws Exception {
        runTestWaitPageFinished(new Runnable(){
            @Override
            public void run() {
                getInstrumentation().runOnMainSync(new Runnable() {
                    @Override
                    public void run() {
                        mXWalkView.reload(mode);
                    }
                });
            }
        });
    }

    /** Synchronous counterpart of loadDataAsync (same ignored-parameter caveat). */
    protected void loadDataSync(final String url, final String data, final String mimeType,
            final boolean isBase64Encoded) throws Exception {
        CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        loadDataAsync(url, data, mimeType, isBase64Encoded);
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }

    /** Loads an asset page and waits for the title-updated callback instead of page-finished. */
    public void loadAssetFileAndWaitForTitle(String fileName) throws Exception {
        CallbackHelper getTitleHelper = mTestHelperBridge.getOnTitleUpdatedHelper();
        int currentCallCount = getTitleHelper.getCallCount();
        String fileContent = getFileContent(fileName);
        loadDataSync(fileName, fileContent, "text/html", false);
        getTitleHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }

    /** True if {@code clazz} declares or inherits a method named {@code methodName}. */
    public boolean checkMethodInClass(Class<?> clazz, String methodName){
        Method[] methods = clazz.getMethods();
        for(Method method : methods) {
            if(method.getName().equals(methodName)){
                return true;
            }
        }
        // Also scan declared (e.g. non-public) methods missed by getMethods().
        Method[] methods2 = clazz.getDeclaredMethods();
        for(Method method : methods2) {
            if(method.getName().equals(methodName)){
                return true;
            }
        }
        return false;
    }

    /** Waits for the element with {@code id} to exist, then fires a JS click on it. */
    public void clickOnElementId_evaluateJavascript(final String id) throws Exception {
        Assert.assertTrue(CriteriaHelper.pollForCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                try {
                    String idIsNotNull = executeJavaScriptAndWaitForResult(
                            "document.getElementById('" + id + "') != null");
                    return idIsNotNull.equals("true");
                } catch (Throwable t) {
                    t.printStackTrace();
                    Assert.fail("Failed to check if DOM is loaded: " + t.toString());
                    return false;
                }
            }
        }, WAIT_TIMEOUT_MS, CHECK_INTERVAL));
        try {
            executeJavaScriptAndWaitForResult(
                    "var evObj = document.createEvent('Events'); " +
                    "evObj.initEvent('click', true, false); " +
                    "document.getElementById('" + id + "').dispatchEvent(evObj);" +
                    "console.log('element with id [" + id + "] clicked');");
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }

    /**
     * Same as above, but can target an element inside a named frame and
     * dispatches the click via a javascript: URL load rather than evaluation.
     */
    public void clickOnElementId(final String id, String frameName) throws Exception {
        String str;
        if (frameName != null) {
            str = "top.window." + frameName + ".document.getElementById('" + id + "')";
        } else {
            str = "document.getElementById('" + id + "')";
        }
        final String script1 = str + " != null";
        final String script2 = str + ".dispatchEvent(evObj);";
        Assert.assertTrue(CriteriaHelper.pollForCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                try {
                    String idIsNotNull = executeJavaScriptAndWaitForResult(script1);
                    return idIsNotNull.equals("true");
                } catch (Throwable t) {
                    t.printStackTrace();
                    Assert.fail("Failed to check if DOM is loaded: " + t.toString());
                    return false;
                }
            }
        }, WAIT_TIMEOUT_MS, CHECK_INTERVAL));
        try {
            loadJavaScriptUrl("javascript:var evObj = document.createEvent('Events'); " +
                    "evObj.initEvent('click', true, false); " +
                    script2 +
                    "console.log('element with id [" + id + "] clicked');");
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }

    /** Registers an uncached HTML page on the server and returns its URL. */
    protected String addPageToTestServer(TestWebServer webServer, String httpPath, String html) {
        List<Pair<String, String>> headers = new ArrayList<Pair<String, String>>();
        headers.add(Pair.create("Content-Type", "text/html"));
        headers.add(Pair.create("Cache-Control", "no-store"));
        return webServer.setResponse(httpPath, html, headers);
    }

    protected void stopLoading() throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.stopLoading();
            }
        });
    }

    protected void pauseTimers() throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.pauseTimers();
            }
        });
    }

    protected void resumeTimers() throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.resumeTimers();
            }
        });
    }

    protected void onHide() throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onHide();
            }
        });
    }

    protected void onShow() throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onShow();
            }
        });
    }

    protected void onDestroy() throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onDestroy();
            }
        });
    }

    /** Installs a download listener that forwards events to mTestHelperBridge. */
    protected void setDownloadListener() {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.setDownloadListener(new XWalkDownloadListener(getActivity()) {
                    @Override
                    public void onDownloadStart(String url, String userAgent,
                            String contentDisposition, String mimetype, long contentLength) {
                        mTestHelperBridge.onDownloadStart(url, userAgent, contentDisposition,
                                mimetype, contentLength);
                    }
                });
            }
        });
    }

    protected boolean canZoomInOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                return mXWalkView.canZoomIn();
            }
        });
    }

    protected boolean canZoomOutOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                return mXWalkView.canZoomOut();
            }
        });
    }

    /** Zooms in and polls until the reported page scale actually changes. */
    protected void zoomInOnUiThreadAndWait() throws Throwable {
        final double dipScale = DeviceDisplayInfo.create(getActivity()).getDIPScale();
        final float previousScale = mTestHelperBridge.getOnScaleChangedHelper().getScale()
                * (float)dipScale;
        assertTrue(runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                return mXWalkView.zoomIn();
            }
        }));
        // The zoom level is updated asynchronously.
        pollOnUiThread(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                return previousScale != mTestHelperBridge.getOnScaleChangedHelper().getScale()
                        * (float)dipScale;
            }
        });
    }

    /** Zooms out and polls until the reported page scale actually changes. */
    protected void zoomOutOnUiThreadAndWait() throws Throwable {
        final double dipScale = DeviceDisplayInfo.create(getActivity()).getDIPScale();
        final float previousScale = mTestHelperBridge.getOnScaleChangedHelper().getScale()
                * (float)dipScale;
        assertTrue(runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                return mXWalkView.zoomOut();
            }
        }));
        // The zoom level is updated asynchronously.
        pollOnUiThread(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                return previousScale != mTestHelperBridge.getOnScaleChangedHelper().getScale()
                        * (float)dipScale;
            }
        });
    }

    /** Zooms by {@code delta} and polls until the reported page scale changes. */
    protected void zoomByOnUiThreadAndWait(final float delta) throws Throwable {
        final double dipScale = DeviceDisplayInfo.create(getActivity()).getDIPScale();
        final float previousScale = mTestHelperBridge.getOnScaleChangedHelper().getScale()
                * (float)dipScale;
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.zoomBy(delta);
            }
        });
        // The zoom level is updated asynchronously.
        pollOnUiThread(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                return previousScale != mTestHelperBridge.getOnScaleChangedHelper().getScale()
                        * (float)dipScale;
            }
        });
    }

    protected void setAcceptLanguages(final String languages) {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.setAcceptLanguages(languages);
            }
        });
    }

    protected void setUserAgent(final String userAgent) {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkView.setUserAgentString(userAgent);
            }
        });
    }

    /** Sets a cookie (5-day expiry) in the current page via a javascript: URL. */
    protected void setCookie(final String name, final String value) throws Exception {
        String jsCommand = "javascript:void((function(){" +
                "var expirationDate = new Date();" +
                "expirationDate.setDate(expirationDate.getDate() + 5);" +
                "document.cookie='" + name + "=" + value +
                "; expires=' + expirationDate.toUTCString();" +
                "})())";
        loadJavaScriptUrl(jsCommand);
    }

    /** Polls (up to 6s, every 50ms) until the cookie manager has a cookie for {@code url}. */
    protected void waitForCookie(final String url) throws InterruptedException {
        assertTrue(CriteriaHelper.pollForCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                return mCookieManager.getCookie(url) != null;
            }
        }, 6000, 50));
    }

    /**
     * Asserts the ';'-separated cookie header contains exactly the expected names.
     * NOTE(review): the found set is passed as the first (expected) argument of
     * MoreAsserts.assertEquals — confirm the intended expected/actual order.
     */
    protected void validateCookies(String responseCookie, String... expectedCookieNames) {
        String[] cookies = responseCookie.split(";");
        Set<String> foundCookieNames = new HashSet<String>();
        for (String cookie : cookies) {
            foundCookieNames.add(cookie.substring(0, cookie.indexOf("=")).trim());
        }
        MoreAsserts.assertEquals(
                foundCookieNames, new HashSet<String>(Arrays.asList(expectedCookieNames)));
    }

    public static final String ABOUT_TITLE = "About the Google";

    /** Registers the canonical /about.html fixture page and returns its URL. */
    protected String addAboutPageToTestServer(TestWebServer webServer) {
        return addPageToTestServer(webServer, "/" + "about.html",
                "<html><head><title>" + ABOUT_TITLE + "</title></head></html>");
    }

    /** Wraps a String as a UTF-8 text/html WebResourceResponse. */
    protected WebResourceResponse stringToWebResourceResponse(String input) throws Throwable {
        final String mimeType = "text/html";
        final String encoding = "UTF-8";
        return new WebResourceResponse(
                mimeType, encoding, new ByteArrayInputStream(input.getBytes(encoding)));
    }

    /** Loads a javascript: URL; silently ignores (with a warning) anything else. */
    protected void loadJavaScriptUrl(final String url) throws Exception {
        if (!url.startsWith("javascript:")) {
            Log.w("Test", "loadJavascriptUrl only accepts javascript: url");
            return;
        }
        loadUrlAsync(url);
    }
}
/* * Copyright 1997-2011 teatrove.org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package openmarker.trove.classfile; import java.io.Externalizable; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.io.ObjectStreamException; import java.io.Serializable; import java.lang.reflect.Array; import java.lang.reflect.Type; import java.util.Map; import openmarker.trove.classfile.generics.GenericArrayTypeDesc; import openmarker.trove.classfile.generics.GenericTypeDesc; import openmarker.trove.classfile.generics.GenericTypeFactory; import openmarker.trove.util.FlyweightSet; import openmarker.trove.util.IdentityMap; /** * This class is used to build field and return type descriptor strings as * defined in <i>The Java Virtual Machine Specification</i>, section 4.3.2. * TypeDesc instances are canonicalized and therefore "==" comparable. * * @author Brian S O'Neill, Nick Hagan */ public abstract class TypeDesc extends Descriptor implements Serializable { /** * Type code returned from getTypeCode, which can be used with the * newarray instruction. 
*/ public final static int OBJECT_CODE = 0, VOID_CODE = 1, BOOLEAN_CODE = 4, CHAR_CODE = 5, FLOAT_CODE = 6, DOUBLE_CODE = 7, BYTE_CODE = 8, SHORT_CODE = 9, INT_CODE = 10, LONG_CODE = 11; /** primitive type void */ public final static TypeDesc VOID; /** primitive type boolean */ public final static TypeDesc BOOLEAN; /** primitive type char */ public final static TypeDesc CHAR; /** primitive type byte */ public final static TypeDesc BYTE; /** primitive type short */ public final static TypeDesc SHORT; /** primitive type int */ public final static TypeDesc INT; /** primitive type long */ public final static TypeDesc LONG; /** primitive type float */ public final static TypeDesc FLOAT; /** primitive type double */ public final static TypeDesc DOUBLE; /** object type java.lang.Object, provided for convenience */ public final static TypeDesc OBJECT; /** object type java.lang.String, provided for convenience */ public final static TypeDesc STRING; // Pool of all shared instances. Ensures identity comparison works. final static FlyweightSet cInstances; // Cache that maps Classes to TypeDescs. 
// NOTE(review): this span is the interior of the TypeDesc class; its header,
// the type-code constants (VOID_CODE..OBJECT_CODE), the cInstances flyweight
// set and the public constants (VOID..STRING) are declared above this view.

// Canonical cache mapping Class objects (and ClassKey wrappers, for generic
// types) to their interned TypeDesc instances.
final static Map<Object, TypeDesc> cClassesToInstances;

static {
    cInstances = new FlyweightSet();
    cClassesToInstances = new IdentityMap();

    // Pre-intern the nine primitive types using their JVM field-descriptor codes.
    VOID = intern(new PrimitiveType("V", VOID_CODE));
    BOOLEAN = intern(new PrimitiveType("Z", BOOLEAN_CODE));
    CHAR = intern(new PrimitiveType("C", CHAR_CODE));
    BYTE = intern(new PrimitiveType("B", BYTE_CODE));
    SHORT = intern(new PrimitiveType("S", SHORT_CODE));
    INT = intern(new PrimitiveType("I", INT_CODE));
    LONG = intern(new PrimitiveType("J", LONG_CODE));
    FLOAT = intern(new PrimitiveType("F", FLOAT_CODE));
    DOUBLE = intern(new PrimitiveType("D", DOUBLE_CODE));
    OBJECT = forClass("java.lang.Object");
    STRING = forClass("java.lang.String");
}

// Flyweight interning: returns the canonical instance equal to the argument.
static TypeDesc intern(TypeDesc type) {
    return (TypeDesc) cInstances.put(type);
}

/**
 * Acquire a TypeDesc for a class, optionally refined by a java.lang.reflect.Type.
 * Falls back to the plain-class lookup when no distinct generic information is
 * present.
 */
public synchronized static TypeDesc forClass(Class<?> clazz, Type genericType) {
    if (clazz == null) {
        return null;
    } else if (genericType == null || genericType == clazz) {
        return forClass(clazz);
    } else {
        return forClass(clazz, GenericTypeFactory.fromType(genericType));
    }
}

/** Composite cache key pairing a raw class with its generic type description. */
protected static class ClassKey {
    private Class<?> clazz;
    private GenericTypeDesc genericType;

    public ClassKey(Class<?> clazz, GenericTypeDesc genericType) {
        this.clazz = clazz;
        this.genericType = genericType;
    }

    public int hashCode() {
        int hashCode = 11;
        hashCode += (13 * this.clazz.hashCode());
        if (this.genericType != null) {
            hashCode += (17 * this.genericType.hashCode());
        }
        return hashCode;
    }

    public boolean equals(Object object) {
        if (object == this) {
            return true;
        } else if (!(object instanceof ClassKey)) {
            return false;
        }
        ClassKey other = (ClassKey) object;
        // Both the raw class and the generic description (null-tolerant) must match.
        if (!this.clazz.equals(other.clazz)
            || (this.genericType == null && other.genericType != null)
            || (other.genericType == null && this.genericType != null)
            || (this.genericType != null && !this.genericType.equals(other.genericType))) {
            return false;
        }
        return true;
    }
}

/**
 * Acquire a TypeDesc for a class refined by an explicit generic type
 * description. Results are cached under a ClassKey.
 */
public synchronized static TypeDesc forClass(Class<?> clazz, GenericTypeDesc genericType) {
    if (clazz == null) {
        return null;
    } else if (genericType == null) {
        return forClass(clazz);
    }
    ClassKey key = new ClassKey(clazz, genericType);
    TypeDesc type = cClassesToInstances.get(key);
    if (type != null) {
        return type;
    }
    if (clazz.isArray()) {
        if (genericType instanceof GenericArrayTypeDesc) {
            type = forClass (
                clazz.getComponentType(),
                ((GenericArrayTypeDesc) genericType).getComponentType()
            ).toArrayType();
        } else {
            type = forClass(clazz.getComponentType()).toArrayType();
        }
    } else if (clazz.isPrimitive()) {
        // Primitives carry no generic information; reuse the interned constants.
        if (clazz == int.class) {
            type = INT;
        }
        if (clazz == boolean.class) {
            type = BOOLEAN;
        }
        if (clazz == char.class) {
            type = CHAR;
        }
        if (clazz == byte.class) {
            type = BYTE;
        }
        if (clazz == long.class) {
            type = LONG;
        }
        if (clazz == float.class) {
            type = FLOAT;
        }
        if (clazz == double.class) {
            type = DOUBLE;
        }
        if (clazz == short.class) {
            type = SHORT;
        }
        if (clazz == void.class) {
            type = VOID;
        }
    } else {
        String name = clazz.getName();
        type = intern(new GenericType(generateDescriptor(name), name, genericType));
    }
    cClassesToInstances.put(key, type);
    return type;
}

/**
 * Acquire a TypeDesc from any class, including primitives and arrays.
 */
public synchronized static TypeDesc forClass(Class<?> clazz) {
    if (clazz == null) {
        return null;
    }
    TypeDesc type = cClassesToInstances.get(clazz);
    if (type != null) {
        return type;
    }
    if (clazz.isArray()) {
        type = forClass(clazz.getComponentType()).toArrayType();
    } else if (clazz.isPrimitive()) {
        if (clazz == int.class) {
            type = INT;
        }
        if (clazz == boolean.class) {
            type = BOOLEAN;
        }
        if (clazz == char.class) {
            type = CHAR;
        }
        if (clazz == byte.class) {
            type = BYTE;
        }
        if (clazz == long.class) {
            type = LONG;
        }
        if (clazz == float.class) {
            type = FLOAT;
        }
        if (clazz == double.class) {
            type = DOUBLE;
        }
        if (clazz == short.class) {
            type = SHORT;
        }
        if (clazz == void.class) {
            type = VOID;
        }
    } else {
        String name = clazz.getName();
        type = intern(new ObjectType(generateDescriptor(name), name));
    }
    cClassesToInstances.put(clazz, type);
    return type;
}

/**
 * Acquire a TypeDesc from any class name, including primitives and arrays.
 * Primitive and array syntax matches Java declarations.
 */
public static TypeDesc forClass(String name) throws IllegalArgumentException {
    // TODO: Figure out how to cache these. Using a plain IdentityMap poses
    // a problem. The back reference to the key causes a memory leak.
    if (name.length() < 1) {
        throw invalidName(name);
    }

    // Array syntax: exactly one trailing "[]" per call, handled recursively.
    int index1 = name.lastIndexOf('[');
    int index2 = name.lastIndexOf(']');

    if (index2 >= 0) {
        if (index2 + 1 != name.length() || index1 + 1 != index2) {
            throw invalidName(name);
        }
        try {
            return forClass(name.substring(0, index1)).toArrayType();
        } catch (IllegalArgumentException e) {
            throw invalidName(name);
        }
    } else if (index1 >= 0) {
        throw invalidName(name);
    }

    // Fast primitive lookup keyed on the first character of the name.
    switch (name.charAt(0)) {
    case 'v':
        if (name.equals("void")) {
            return VOID;
        }
        break;
    case 'b':
        if (name.equals("boolean")) {
            return BOOLEAN;
        } else if (name.equals("byte")) {
            return BYTE;
        }
        break;
    case 'c':
        if (name.equals("char")) {
            return CHAR;
        }
        break;
    case 's':
        if (name.equals("short")) {
            return SHORT;
        }
        break;
    case 'i':
        if (name.equals("int")) {
            return INT;
        }
        break;
    case 'l':
        if (name.equals("long")) {
            return LONG;
        }
        break;
    case 'f':
        if (name.equals("float")) {
            return FLOAT;
        }
        break;
    case 'd':
        if (name.equals("double")) {
            return DOUBLE;
        }
        break;
    }

    String desc = generateDescriptor(name);
    // Accept internal-form names ("java/lang/String") but store the dotted form.
    if (name.indexOf('/') >= 0) {
        name = name.replace('/', '.');
    }
    return intern(new ObjectType(desc, name));
}

/** Bulk variant of {@link #forClass(Class)}. */
public static TypeDesc[] forClasses(Class<?>[] clazzes) {
    TypeDesc[] types = new TypeDesc[clazzes.length];
    for (int i = 0; i < clazzes.length; i++) {
        types[i] = TypeDesc.forClass(clazzes[i]);
    }
    return types;
}

private static IllegalArgumentException invalidName(String name) {
    return new IllegalArgumentException("Invalid name: " + name);
}

/**
 * Acquire a TypeDesc from a type descriptor. This syntax is described in
 * section 4.3.2, Field Descriptors.
 */
public static TypeDesc forDescriptor(String desc) throws IllegalArgumentException {
    // TODO: Figure out how to cache these. Using a plain IdentityMap poses
    // a problem. The back reference to the key causes a memory leak.
    TypeDesc td;
    int cursor = 0;
    int dim = 0;
    try {
        char c;
        // Count leading '[' characters: one per array dimension.
        while ((c = desc.charAt(cursor++)) == '[') {
            dim++;
        }

        switch (c) {
        case 'V':
            td = VOID;
            break;
        case 'Z':
            td = BOOLEAN;
            break;
        case 'C':
            td = CHAR;
            break;
        case 'B':
            td = BYTE;
            break;
        case 'S':
            td = SHORT;
            break;
        case 'I':
            td = INT;
            break;
        case 'J':
            td = LONG;
            break;
        case 'F':
            td = FLOAT;
            break;
        case 'D':
            td = DOUBLE;
            break;
        case 'L':
            if (dim > 0) {
                // Strip the array prefix so the interned ObjectType descriptor
                // refers to the element type only.
                desc = desc.substring(dim);
                cursor = 1;
            }
            StringBuffer name = new StringBuffer(desc.length() - 2);
            while ((c = desc.charAt(cursor++)) != ';') {
                if (c == '/') {
                    c = '.';
                }
                name.append(c);
            }
            td = intern(new ObjectType(desc, name.toString()));
            break;
        default:
            throw invalidDescriptor(desc);
        }
    } catch (NullPointerException e) {
        throw invalidDescriptor(desc);
    } catch (IndexOutOfBoundsException e) {
        throw invalidDescriptor(desc);
    }

    // Reject trailing garbage after the descriptor proper.
    if (cursor != desc.length()) {
        throw invalidDescriptor(desc);
    }

    while (--dim >= 0) {
        td = td.toArrayType();
    }

    return td;
}

private static IllegalArgumentException invalidDescriptor(String desc) {
    return new IllegalArgumentException("Invalid descriptor: " + desc);
}

// Builds the JVM field descriptor "Lpkg/Name;" from a dotted class name,
// converting '.' separators to '/'.
private static String generateDescriptor(String classname) {
    int length = classname.length();
    char[] buf = new char[length + 2];
    buf[0] = 'L';
    classname.getChars(0, length, buf, 1);
    int i;
    for (i = 1; i <= length; i++) {
        char c = buf[i];
        if (c == '.') {
            buf[i] = '/';
        }
    }
    buf[i] = ';';
    return new String(buf);
}

// NOTE(review): a large commented-out generic-signature generator
// (generateDescriptor over java.lang.reflect.Type, ParameterizedType,
// TypeVariable, WildcardType and GenericArrayType) was removed here for
// readability; recover it from version control if it is ever needed.

// The JVM field descriptor of this type; also serves as identity for
// hashCode/equals and toString.
transient final String mDescriptor;

TypeDesc(String desc) {
    mDescriptor = desc;
}

/**
 * Returns a type descriptor string, excluding generics.
 */
public final String getDescriptor() {
    return mDescriptor;
}

/**
 * Returns the class name for this descriptor. If the type is primitive,
 * then the Java primitive type name is returned. If the type is an array,
 * only the root component type name is returned.
 */
public abstract String getRootName();

/**
 * Returns the class name for this descriptor. If the type is primitive,
 * then the Java primitive type name is returned. If the type is an array,
 * "[]" is appended at the end of the name for each dimension.
 */
public abstract String getFullName();

/**
 * Returns a type code for operating on primitive types in switches. If
 * not primitive, OBJECT_CODE is returned.
 */
public abstract int getTypeCode();

/**
 * Returns true if this is a primitive type.
 */
public abstract boolean isPrimitive();

/**
 * Returns true if this is a primitive long or double type.
 */
public abstract boolean isDoubleWord();

/**
 * Returns true if this is an array type.
 */
public abstract boolean isArray();

/**
 * Returns the number of dimensions this array type has. If not an array,
 * zero is returned.
 */
public abstract int getDimensions();

/**
 * Returns the component type of this array type. If not an array, null is
 * returned.
 */
public abstract TypeDesc getComponentType();

/**
 * Returns the root component type of this array type. If not an array,
 * null is returned.
 */
public abstract TypeDesc getRootComponentType();

/**
 * Converts this type to an array type. If already an array, another
 * dimension is added.
 */
public abstract TypeDesc toArrayType();

/**
 * Returns the object peer of this primitive type. For int, the object peer
 * is java.lang.Integer. If this type is an object type, it is simply
 * returned.
 */
public abstract TypeDesc toObjectType();

/**
 * Returns the primitive peer of this object type, if one exists. For
 * java.lang.Integer, the primitive peer is int. If this type is a
 * primitive type, it is simply returned. Arrays have no primitive peer.
 */
public abstract TypeDesc toPrimitiveType();

/**
 * Returns this type as a class. If the class isn't found, null is
 * returned.
 */
public abstract Class<?> toClass();

/**
 * Returns this type as a class. If the class isn't found, null is
 * returned.
 * @param loader optional ClassLoader to load class from
 */
public abstract Class<?> toClass(ClassLoader loader);

/**
 * Returns this in type descriptor syntax.
 */
public String toString() {
    return mDescriptor;
}

public String getSignature() {
    return toString();
}

public int hashCode() {
    return mDescriptor.hashCode();
}

public boolean equals(Object other) {
    if (this == other) {
        return true;
    }
    if (other instanceof TypeDesc) {
        return ((TypeDesc) other).mDescriptor.equals(mDescriptor);
    }
    return false;
}

// Serialization proxy: instances serialize as an External record and are
// re-interned via forDescriptor on read.
Object writeReplace() throws ObjectStreamException {
    return new External(mDescriptor);
}

private static class PrimitiveType extends TypeDesc {
    private transient final int mCode;
    private transient TypeDesc mArrayType;  // lazily created, then cached
    private transient TypeDesc mObjectType; // lazily created boxed peer

    PrimitiveType(String desc, int code) {
        super(desc);
        mCode = code;
    }

    public String getRootName() {
        switch (mCode) {
        default:
        case VOID_CODE:
            return "void";
        case BOOLEAN_CODE:
            return "boolean";
        case CHAR_CODE:
            return "char";
        case BYTE_CODE:
            return "byte";
        case SHORT_CODE:
            return "short";
        case INT_CODE:
            return "int";
        case LONG_CODE:
            return "long";
        case FLOAT_CODE:
            return "float";
        case DOUBLE_CODE:
            return "double";
        }
    }

    public String getFullName() {
        return getRootName();
    }

    public int getTypeCode() {
        return mCode;
    }

    public boolean isPrimitive() {
        return true;
    }

    public boolean isDoubleWord() {
        return mCode == DOUBLE_CODE || mCode == LONG_CODE;
    }

    public boolean isArray() {
        return false;
    }

    public int getDimensions() {
        return 0;
    }

    public TypeDesc getComponentType() {
        return null;
    }

    public TypeDesc getRootComponentType() {
        return null;
    }

    public TypeDesc toArrayType() {
        if (mArrayType == null) {
            // Array descriptor is just '[' + this primitive's descriptor char.
            char[] buf = new char[2];
            buf[0] = '[';
            buf[1] = mDescriptor.charAt(0);
            mArrayType = intern(new ArrayType(new String(buf), this));
        }
        return mArrayType;
    }

    public TypeDesc toObjectType() {
        if (mObjectType == null) {
            switch (mCode) {
            default:
            case VOID_CODE:
                mObjectType = forClass("java.lang.Void");
                break;
            case BOOLEAN_CODE:
                mObjectType = forClass("java.lang.Boolean");
                break;
            case CHAR_CODE:
                mObjectType = forClass("java.lang.Character");
                break;
            case BYTE_CODE:
                mObjectType = forClass("java.lang.Byte");
                break;
            case SHORT_CODE:
                mObjectType = forClass("java.lang.Short");
                break;
            case INT_CODE:
                mObjectType = forClass("java.lang.Integer");
                break;
            case LONG_CODE:
                mObjectType = forClass("java.lang.Long");
                break;
            case FLOAT_CODE:
                mObjectType = forClass("java.lang.Float");
                break;
            case DOUBLE_CODE:
                mObjectType = forClass("java.lang.Double");
                break;
            }
        }
        return mObjectType;
    }

    public TypeDesc toPrimitiveType() {
        return this;
    }

    public Class<?> toClass() {
        switch (mCode) {
        default:
        case VOID_CODE:
            return void.class;
        case BOOLEAN_CODE:
            return boolean.class;
        case CHAR_CODE:
            return char.class;
        case BYTE_CODE:
            return byte.class;
        case SHORT_CODE:
            return short.class;
        case INT_CODE:
            return int.class;
        case LONG_CODE:
            return long.class;
        case FLOAT_CODE:
            return float.class;
        case DOUBLE_CODE:
            return double.class;
        }
    }

    public Class<?> toClass(ClassLoader loader) {
        // Primitive classes are loader-independent.
        return toClass();
    }
}

private static class GenericType extends ObjectType {
    private transient GenericTypeDesc mGenericDesc;
    private transient TypeDesc mArrayType;

    GenericType(String desc, String name, GenericTypeDesc genericDesc) {
        super(desc, name);
        mGenericDesc = genericDesc;
    }

    public TypeDesc toArrayType() {
        if (mArrayType == null) {
            int length = mDescriptor.length();
            char[] buf = new char[length + 1];
            buf[0] = '[';
            mDescriptor.getChars(0, length, buf, 1);
            mArrayType = intern (
                new GenericArray(new String(buf), this,
                                 GenericArrayTypeDesc.forType(mGenericDesc))
            );
        }
        return mArrayType;
    }

    public int hashCode() {
        return super.hashCode() ^ mGenericDesc.hashCode();
    }

    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        if (other instanceof GenericType) {
            return super.equals(other)
                && ((GenericType) other).mGenericDesc.equals(this.mGenericDesc);
        }
        return false;
    }

    public String getSignature() {
        return mGenericDesc.getSignature();
    }
}

private static class ObjectType extends TypeDesc {
    private transient final String mName;
    private transient TypeDesc mArrayType;
    private transient TypeDesc mPrimitiveType;
    private transient Class<?> mClass;

    ObjectType(String desc, String name) {
        super(desc);
        mName = name;
    }

    public String getRootName() {
        return mName;
    }

    public String getFullName() {
        return mName;
    }

    public int getTypeCode() {
        return OBJECT_CODE;
    }

    public boolean isPrimitive() {
        return false;
    }

    public boolean isDoubleWord() {
        return false;
    }

    public boolean isArray() {
        return false;
    }

    public int getDimensions() {
        return 0;
    }

    public TypeDesc getComponentType() {
        return null;
    }

    public TypeDesc getRootComponentType() {
        return null;
    }

    public TypeDesc toArrayType() {
        if (mArrayType == null) {
            int length = mDescriptor.length();
            char[] buf = new char[length + 1];
            buf[0] = '[';
            mDescriptor.getChars(0, length, buf, 1);
            mArrayType = intern(new ArrayType(new String(buf), this));
        }
        return mArrayType;
    }

    public TypeDesc toObjectType() {
        return this;
    }

    public TypeDesc toPrimitiveType() {
        if (mPrimitiveType == null) {
            String name = mName;
            // Fast dispatch on the character after the "java.lang." prefix.
            if (name.startsWith("java.lang.") && name.length() > 10) {
                switch (name.charAt(10)) {
                case 'V':
                    if (name.equals("java.lang.Void")) {
                        mPrimitiveType = VOID;
                    }
                    break;
                case 'B':
                    if (name.equals("java.lang.Boolean")) {
                        mPrimitiveType = BOOLEAN;
                    } else if (name.equals("java.lang.Byte")) {
                        mPrimitiveType = BYTE;
                    }
                    break;
                case 'C':
                    if (name.equals("java.lang.Character")) {
                        mPrimitiveType = CHAR;
                    }
                    break;
                case 'S':
                    if (name.equals("java.lang.Short")) {
                        mPrimitiveType = SHORT;
                    }
                    break;
                case 'I':
                    if (name.equals("java.lang.Integer")) {
                        mPrimitiveType = INT;
                    }
                    break;
                case 'L':
                    if (name.equals("java.lang.Long")) {
                        mPrimitiveType = LONG;
                    }
                    break;
                case 'F':
                    if (name.equals("java.lang.Float")) {
                        mPrimitiveType = FLOAT;
                    }
                    break;
                case 'D':
                    if (name.equals("java.lang.Double")) {
                        mPrimitiveType = DOUBLE;
                    }
                    break;
                }
            }
        }
        return mPrimitiveType;
    }

    public final Class<?> toClass() {
        if (mClass == null) {
            mClass = toClass(null);
        }
        return mClass;
    }

    public Class<?> toClass(ClassLoader loader) {
        // NOTE(review): when this type has a primitive peer (e.g.
        // java.lang.Integer) the boxed class constant is returned directly
        // without consulting the loader; only other object types are
        // resolved by name.
        TypeDesc type = toPrimitiveType();
        if (type != null) {
            switch (type.getTypeCode()) {
            default:
            case VOID_CODE:
                return Void.class;
            case BOOLEAN_CODE:
                return Boolean.class;
            case CHAR_CODE:
                return Character.class;
            case FLOAT_CODE:
                return Float.class;
            case DOUBLE_CODE:
                return Double.class;
            case BYTE_CODE:
                return Byte.class;
            case SHORT_CODE:
                return Short.class;
            case INT_CODE:
                return Integer.class;
            case LONG_CODE:
                return Long.class;
            }
        }
        try {
            if (loader == null) {
                return Class.forName(mName);
            } else {
                return loader.loadClass(mName);
            }
        } catch (ClassNotFoundException e) {
            return null;
        }
    }
}

private static class GenericArray extends ArrayType {
    private transient TypeDesc mArrayType;
    private transient GenericTypeDesc mGenericDesc;

    GenericArray(String desc, TypeDesc component, GenericTypeDesc genericDesc) {
        super(desc, component);
        mGenericDesc = genericDesc;
    }

    public TypeDesc toArrayType() {
        if (mArrayType == null) {
            int length = mDescriptor.length();
            char[] buf = new char[length + 1];
            buf[0] = '[';
            mDescriptor.getChars(0, length, buf, 1);
            mArrayType = intern (
                new GenericArray(new String(buf), this,
                                 GenericArrayTypeDesc.forType(mGenericDesc))
            );
        }
        return mArrayType;
    }

    public int hashCode() {
        return super.hashCode() ^ mGenericDesc.hashCode();
    }

    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        // NOTE(review): tests against GenericType, not GenericArray — looks
        // like a copy/paste slip from GenericType.equals; confirm before
        // relying on GenericArray equality.
        if (other instanceof GenericType) {
            return super.equals(other)
                && ((GenericType) other).mGenericDesc.equals(this.mGenericDesc);
        }
        return false;
    }

    public String getSignature() {
        return mGenericDesc.getSignature();
    }
}

private static class ArrayType extends ObjectType {
    private transient final TypeDesc mComponent;
    private transient final String mFullName;

    ArrayType(String desc, TypeDesc component) {
        super(desc, component.getRootName());
        mComponent = component;
        mFullName = component.getFullName().concat("[]");
    }

    public String getFullName() {
        return mFullName;
    }

    public boolean isArray() {
        return true;
    }

    public int getDimensions() {
        return mComponent.getDimensions() + 1;
    }

    public TypeDesc getComponentType() {
        return mComponent;
    }

    public TypeDesc getRootComponentType() {
        TypeDesc type = mComponent;
        while (type.isArray()) {
            type = type.getComponentType();
        }
        return type;
    }

    public TypeDesc toPrimitiveType() {
        // Arrays have no primitive peer.
        return null;
    }

    public Class<?> toClass(ClassLoader loader) {
        if (loader == null) {
            return arrayClass(getRootComponentType().toClass());
        } else {
            return arrayClass(getRootComponentType().toClass(loader));
        }
    }

    // Obtains the runtime array class by instantiating a zero-length
    // (or zero-sized multi-dimensional) array and taking its class.
    private Class<?> arrayClass(Class<?> clazz) {
        if (clazz == null) {
            return null;
        }
        int dim = getDimensions();
        try {
            if (dim == 1) {
                return Array.newInstance(clazz, 0).getClass();
            } else {
                return Array.newInstance(clazz, new int[dim]).getClass();
            }
        } catch (IllegalArgumentException e) {
            return null;
        }
    }
}

// Serialization proxy written by TypeDesc.writeReplace; readResolve
// re-interns via forDescriptor so canonical instances survive a round trip.
private static class External implements Externalizable {
    private String mDescriptor;

    // NOTE(review): Externalizable requires a public no-arg constructor for
    // deserialization; none is declared here — verify that serialization
    // round-trips actually work.
    public External(String desc) {
        mDescriptor = desc;
    }

    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeUTF(mDescriptor);
    }

    public void readExternal(ObjectInput in) throws IOException {
        mDescriptor = in.readUTF();
    }

    public Object readResolve() throws ObjectStreamException {
        return forDescriptor(mDescriptor);
    }
}
}
// Dstl (c) Crown Copyright 2017
package uk.gov.dstl.baleen.consumers.utils;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.uima.fit.util.JCasUtil;
import org.apache.uima.jcas.JCas;
import org.apache.uima.jcas.tcas.DocumentAnnotation;

import com.google.common.base.Strings;

import uk.gov.dstl.baleen.types.metadata.Metadata;
import uk.gov.dstl.baleen.types.metadata.PublishedId;
import uk.gov.dstl.baleen.types.semantic.Entity;
import uk.gov.dstl.baleen.types.semantic.Relation;
import uk.gov.dstl.baleen.uima.UimaMonitor;
import uk.gov.dstl.baleen.uima.UimaSupport;

/**
 * Helper class for converting a CAS object into a single document (i.e. with entities and relations
 * embedded) for persistence using the following schema:
 *
 * <pre>
 * {
 *   content,
 *   language,
 *   externalId,
 *   dateAccessed,
 *   sourceUri,
 *   docType,
 *   classification,
 *   caveats: [],
 *   releasability: [],
 *   publishedId: [],
 *   metadata: {
 *     key1: value1,
 *     key2: value2,
 *     ...
 *   },
 *   entities: [
 *     {
 *       externalId,
 *       value,
 *       confidence,
 *       type,
 *       begin,
 *       end,
 *       ...
 *     }
 *   ],
 *   relations: [
 *     {
 *       ...
 *     }
 *   ]
 * }
 * </pre>
 *
 * The protective marking set on the DocumentAnnotation is used as the classification of the
 * document, and ProtectiveMarking annotations are ignored. Events are not currently supported.
 *
 * <p>Use of this class ensures consistency of formats across databases, e.g. Elasticsearch and
 * ActiveMQ
 */
public class SingleDocumentConsumerFormat {
  /** Private constructor for utility class */
  private SingleDocumentConsumerFormat() {
    // Do nothing
  }

  /**
   * Convert the provided jCas object into a standardised representation
   *
   * @param jCas the CAS to convert
   * @param fields An instance of IEntityConverterFields to be used
   * @param contentHashAsId Should a hash of the content be used to generate the ID? If false, then
   *     a hash of the Source URI is used instead.
   * @param monitor monitor used by the entity/relation converter for reporting
   * @param support support object used to obtain the document history
   * @return Standardised representation of jCas
   */
  public static Map<String, Object> formatCas(
      JCas jCas,
      IEntityConverterFields fields,
      boolean contentHashAsId,
      UimaMonitor monitor,
      UimaSupport support) {
    Set<String> stopFeatures = ConsumerUtils.getDefaultStopFeatures();

    Map<String, Object> output = new HashMap<>();
    EntityRelationConverter entityRelationConverter =
        new EntityRelationConverter(
            monitor, false, support.getDocumentHistory(jCas), stopFeatures, fields);

    // Content and language
    output.put("content", jCas.getDocumentText());
    if (!Strings.isNullOrEmpty(jCas.getDocumentLanguage())) {
      output.put("language", jCas.getDocumentLanguage());
    }

    // Document Annotations
    DocumentAnnotation da = UimaSupport.getDocumentAnnotation(jCas);
    output.putAll(createDocumentAnnotationMap(da));

    String id = ConsumerUtils.getExternalId(da, contentHashAsId);
    output.put("externalId", id);

    // Metadata Annotations
    Collection<PublishedId> publishedIds = JCasUtil.select(jCas, PublishedId.class);
    if (!publishedIds.isEmpty()) {
      output.put("publishedId", createPublishedIdList(publishedIds));
    }

    Collection<Metadata> metadata = JCasUtil.select(jCas, Metadata.class);
    if (!metadata.isEmpty()) {
      output.put("metadata", createMetadataMap(metadata));
    }

    // Entities
    List<Map<String, Object>> entitiesList = new ArrayList<>();
    Collection<Entity> entities = JCasUtil.select(jCas, Entity.class);
    for (Entity ent : entities) {
      entitiesList.add(entityRelationConverter.convertEntity(ent));
    }
    output.put("entities", entitiesList);

    // Relations
    List<Map<String, Object>> relationsList = new ArrayList<>();
    Collection<Relation> relations = JCasUtil.select(jCas, Relation.class);
    for (Relation rel : relations) {
      relationsList.add(entityRelationConverter.convertRelation(rel));
    }
    output.put("relations", relationsList);

    return output;
  }

  /** Create a map containing information from the DocumentAnnotation object */
  public static Map<String, Object> createDocumentAnnotationMap(DocumentAnnotation da) {
    Map<String, Object> map = new HashMap<>();

    if (!Strings.isNullOrEmpty(da.getSourceUri())) {
      map.put("sourceUri", da.getSourceUri());
    }

    map.put("dateAccessed", da.getTimestamp());

    if (!Strings.isNullOrEmpty(da.getDocType())) {
      map.put("docType", da.getDocType());
    }

    if (!Strings.isNullOrEmpty(da.getDocumentClassification())) {
      map.put("classification", da.getDocumentClassification().toUpperCase());
    }

    if (da.getDocumentCaveats() != null) {
      String[] caveats = da.getDocumentCaveats().toArray();
      if (caveats.length > 0) {
        map.put("caveats", caveats);
      }
    }

    if (da.getDocumentReleasability() != null) {
      String[] rels = da.getDocumentReleasability().toArray();
      if (rels.length > 0) {
        map.put("releasability", rels);
      }
    }

    return map;
  }

  /**
   * Create a map of all metadata objects in a collection. Duplicate key values will be converted
   * into a list of objects.
   */
  public static Map<String, Object> createMetadataMap(Collection<Metadata> md) {
    Map<String, Object> metadata = new HashMap<>();

    for (Metadata m : md) {
      // Flatten dots in keys so they are safe to use as field names downstream
      String key = m.getKey().replaceAll("\\.", "_");

      if (metadata.containsKey(key)) {
        List<Object> list = new ArrayList<>();

        Object o = metadata.get(key);
        if (o instanceof List) {
          list.addAll((List<?>) o);
        } else {
          // BUG FIX: previously the existing scalar value was silently dropped
          list.add(o);
        }

        list.add(m.getValue());
        // BUG FIX: previously this stored m.getValue(), discarding the
        // accumulated list and contradicting the documented behaviour above
        metadata.put(key, list);
      } else {
        metadata.put(key, m.getValue());
      }
    }

    return metadata;
  }

  /** Create a list of PublishedId values from a collection of PublishedIds */
  public static List<String> createPublishedIdList(Collection<PublishedId> publishedIds) {
    List<String> pids = new ArrayList<>();

    publishedIds.forEach(x -> pids.add(x.getValue()));

    return pids;
  }
}
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.eks.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/eks-2017-11-01/UpdateClusterConfig" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class UpdateClusterConfigRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The name of the Amazon EKS cluster to update. * </p> */ private String name; private VpcConfigRequest resourcesVpcConfig; /** * <p> * Enable or disable exporting the Kubernetes control plane logs for your cluster to CloudWatch Logs. By default, * cluster control plane logs aren't exported to CloudWatch Logs. For more information, see <a * href="https://docs.aws.amazon.com/eks/latest/userguide/control-plane-logs.html">Amazon EKS Cluster Control Plane * Logs</a> in the <i> <i>Amazon EKS User Guide</i> </i>. * </p> * <note> * <p> * CloudWatch Logs ingestion, archive storage, and data scanning rates apply to exported control plane logs. For * more information, see <a href="http://aws.amazon.com/cloudwatch/pricing/">Amazon CloudWatch Pricing</a>. * </p> * </note> */ private Logging logging; /** * <p> * Unique, case-sensitive identifier that you provide to ensure the idempotency of the request. 
* </p>
 */
private String clientRequestToken;

/**
 * <p>
 * The name of the Amazon EKS cluster to update.
 * </p>
 *
 * @param name
 *        The name of the Amazon EKS cluster to update.
 */
public void setName(String name) {
    this.name = name;
}

/**
 * <p>
 * The name of the Amazon EKS cluster to update.
 * </p>
 *
 * @return The name of the Amazon EKS cluster to update.
 */
public String getName() {
    return this.name;
}

/**
 * <p>
 * The name of the Amazon EKS cluster to update.
 * </p>
 *
 * @param name
 *        The name of the Amazon EKS cluster to update.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public UpdateClusterConfigRequest withName(String name) {
    setName(name);
    return this;
}

/**
 * @param resourcesVpcConfig
 *        The VPC configuration to apply to the cluster.
 */
public void setResourcesVpcConfig(VpcConfigRequest resourcesVpcConfig) {
    this.resourcesVpcConfig = resourcesVpcConfig;
}

/**
 * @return The VPC configuration applied to the cluster.
 */
public VpcConfigRequest getResourcesVpcConfig() {
    return this.resourcesVpcConfig;
}

/**
 * @param resourcesVpcConfig
 *        The VPC configuration to apply to the cluster.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public UpdateClusterConfigRequest withResourcesVpcConfig(VpcConfigRequest resourcesVpcConfig) {
    setResourcesVpcConfig(resourcesVpcConfig);
    return this;
}

/**
 * <p>
 * Enable or disable exporting the Kubernetes control plane logs for your cluster to CloudWatch Logs. By default,
 * cluster control plane logs aren't exported to CloudWatch Logs. For more information, see <a
 * href="https://docs.aws.amazon.com/eks/latest/userguide/control-plane-logs.html">Amazon EKS Cluster Control Plane
 * Logs</a> in the <i>Amazon EKS User Guide</i>. CloudWatch Logs ingestion, archive storage, and data scanning rates
 * apply to exported control plane logs (see <a href="http://aws.amazon.com/cloudwatch/pricing/">Amazon CloudWatch
 * Pricing</a>).
 * </p>
 *
 * @param logging
 *        The control plane logging configuration for the cluster.
 */
public void setLogging(Logging logging) {
    this.logging = logging;
}

/**
 * <p>
 * Returns the control plane logging configuration for the cluster; see {@link #setLogging(Logging)} for details and
 * pricing considerations.
 * </p>
 *
 * @return The control plane logging configuration for the cluster.
 */
public Logging getLogging() {
    return this.logging;
}

/**
 * <p>
 * Fluent variant of {@link #setLogging(Logging)}; see that method for details and pricing considerations.
 * </p>
 *
 * @param logging
 *        The control plane logging configuration for the cluster.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public UpdateClusterConfigRequest withLogging(Logging logging) {
    setLogging(logging);
    return this;
}

/**
 * <p>
 * Unique, case-sensitive identifier that you provide to ensure the idempotency of the request.
 * </p>
 *
 * @param clientRequestToken
 *        Unique, case-sensitive identifier that you provide to ensure the idempotency of the request.
 */
public void setClientRequestToken(String clientRequestToken) {
    this.clientRequestToken = clientRequestToken;
}

/**
 * <p>
 * Unique, case-sensitive identifier that you provide to ensure the idempotency of the request.
 * </p>
 *
 * @return Unique, case-sensitive identifier that you provide to ensure the idempotency of the request.
 */
public String getClientRequestToken() {
    return this.clientRequestToken;
}

/**
 * <p>
 * Unique, case-sensitive identifier that you provide to ensure the idempotency of the request.
 * </p>
 *
 * @param clientRequestToken
 *        Unique, case-sensitive identifier that you provide to ensure the idempotency of the request.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public UpdateClusterConfigRequest withClientRequestToken(String clientRequestToken) {
    setClientRequestToken(clientRequestToken);
    return this;
}

/**
 * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
 * redacted from this string using a placeholder value.
 *
 * @return A string representation of this object.
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getName() != null) sb.append("Name: ").append(getName()).append(","); if (getResourcesVpcConfig() != null) sb.append("ResourcesVpcConfig: ").append(getResourcesVpcConfig()).append(","); if (getLogging() != null) sb.append("Logging: ").append(getLogging()).append(","); if (getClientRequestToken() != null) sb.append("ClientRequestToken: ").append(getClientRequestToken()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof UpdateClusterConfigRequest == false) return false; UpdateClusterConfigRequest other = (UpdateClusterConfigRequest) obj; if (other.getName() == null ^ this.getName() == null) return false; if (other.getName() != null && other.getName().equals(this.getName()) == false) return false; if (other.getResourcesVpcConfig() == null ^ this.getResourcesVpcConfig() == null) return false; if (other.getResourcesVpcConfig() != null && other.getResourcesVpcConfig().equals(this.getResourcesVpcConfig()) == false) return false; if (other.getLogging() == null ^ this.getLogging() == null) return false; if (other.getLogging() != null && other.getLogging().equals(this.getLogging()) == false) return false; if (other.getClientRequestToken() == null ^ this.getClientRequestToken() == null) return false; if (other.getClientRequestToken() != null && other.getClientRequestToken().equals(this.getClientRequestToken()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode()); hashCode = prime * hashCode + ((getResourcesVpcConfig() == null) ? 0 : getResourcesVpcConfig().hashCode()); hashCode = prime * hashCode + ((getLogging() == null) ? 
0 : getLogging().hashCode()); hashCode = prime * hashCode + ((getClientRequestToken() == null) ? 0 : getClientRequestToken().hashCode()); return hashCode; } @Override public UpdateClusterConfigRequest clone() { return (UpdateClusterConfigRequest) super.clone(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.controller.queue.clustered; import org.apache.nifi.cluster.coordination.ClusterCoordinator; import org.apache.nifi.cluster.coordination.ClusterTopologyEventListener; import org.apache.nifi.cluster.coordination.node.NodeConnectionState; import org.apache.nifi.cluster.protocol.NodeIdentifier; import org.apache.nifi.connectable.Connection; import org.apache.nifi.controller.MockFlowFileRecord; import org.apache.nifi.controller.MockSwapManager; import org.apache.nifi.controller.ProcessScheduler; import org.apache.nifi.controller.status.FlowFileAvailability; import org.apache.nifi.controller.queue.NopConnectionEventListener; import org.apache.nifi.controller.queue.QueueSize; import org.apache.nifi.controller.queue.clustered.client.async.AsyncLoadBalanceClientRegistry; import org.apache.nifi.controller.queue.clustered.partition.FlowFilePartitioner; import org.apache.nifi.controller.queue.clustered.partition.QueuePartition; import org.apache.nifi.controller.queue.clustered.partition.RoundRobinPartitioner; import org.apache.nifi.controller.repository.ContentRepository; import org.apache.nifi.controller.repository.FlowFileRecord; import 
org.apache.nifi.controller.repository.FlowFileRepository;
import org.apache.nifi.controller.repository.SwapSummary;
import org.apache.nifi.controller.repository.claim.ResourceClaimManager;
import org.apache.nifi.controller.repository.claim.StandardResourceClaimManager;
import org.apache.nifi.events.EventReporter;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.FlowFilePrioritizer;
import org.apache.nifi.provenance.ProvenanceEventRepository;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@code SocketLoadBalancedFlowFileQueue}. Exercises partition selection, queue
 * size/emptiness accounting, swap-file recovery, and rebalancing triggered by cluster topology or
 * partitioner changes, using a mocked 3-node cluster.
 */
public class TestSocketLoadBalancedFlowFileQueue {

    private Connection connection;
    private FlowFileRepository flowFileRepo;
    private ContentRepository contentRepo;
    private ProvenanceEventRepository provRepo;
    private ResourceClaimManager claimManager;
    private ClusterCoordinator clusterCoordinator;
    private MockSwapManager swapManager;
    private EventReporter eventReporter;
    private SocketLoadBalancedFlowFileQueue queue;
    // Captured from clusterCoordinator.registerEventListener(...) in setup(); volatile because the
    // queue may register it from another thread while tests read it.
    private volatile ClusterTopologyEventListener clusterTopologyEventListener;
    // Mutable node list backing the mocked coordinator; tests mutate it to simulate topology changes.
    private List<NodeIdentifier> nodeIds;
    // Monotonically increasing port source so every generated NodeIdentifier gets unique ports.
    private int nodePort = 4096;

    /**
     * Builds a queue against a mocked 3-node cluster whose first node is the local node.
     */
    @Before
    public void setup() {
        MockFlowFileRecord.resetIdGenerator();

        connection = mock(Connection.class);
        when(connection.getIdentifier()).thenReturn("unit-test");

        flowFileRepo = mock(FlowFileRepository.class);
        contentRepo = mock(ContentRepository.class);
        provRepo = mock(ProvenanceEventRepository.class);
        claimManager = new StandardResourceClaimManager();
        clusterCoordinator = mock(ClusterCoordinator.class);
        swapManager = new MockSwapManager();
        eventReporter = EventReporter.NO_OP;

        final NodeIdentifier localNodeIdentifier = createNodeIdentifier("00000000-0000-0000-0000-000000000000");

        nodeIds = new ArrayList<>();
        nodeIds.add(localNodeIdentifier);
        nodeIds.add(createNodeIdentifier("11111111-1111-1111-1111-111111111111"));
        nodeIds.add(createNodeIdentifier("22222222-2222-2222-2222-222222222222"));

        // Answer (rather than a fixed stub) so later mutations of nodeIds are visible to the queue.
        Mockito.doAnswer(new Answer<Set<NodeIdentifier>>() {
            @Override
            public Set<NodeIdentifier> answer(InvocationOnMock invocation) throws Throwable {
                return new HashSet<>(nodeIds);
            }
        }).when(clusterCoordinator).getNodeIdentifiers();

        when(clusterCoordinator.getLocalNodeIdentifier()).thenReturn(localNodeIdentifier);

        // Capture the topology listener the queue registers so tests can fire topology events directly.
        doAnswer(new Answer() {
            @Override
            public Object answer(final InvocationOnMock invocation) throws Throwable {
                clusterTopologyEventListener = invocation.getArgument(0);
                return null;
            }
        }).when(clusterCoordinator).registerEventListener(Mockito.any(ClusterTopologyEventListener.class));

        final ProcessScheduler scheduler = mock(ProcessScheduler.class);
        final AsyncLoadBalanceClientRegistry registry = mock(AsyncLoadBalanceClientRegistry.class);
        queue = new SocketLoadBalancedFlowFileQueue("unit-test", new NopConnectionEventListener(), scheduler, flowFileRepo, provRepo, contentRepo,
            claimManager, clusterCoordinator, registry, swapManager, 10000, eventReporter);
    }

    // Creates a node id with a random UUID and unique ports.
    private NodeIdentifier createNodeIdentifier() {
        return createNodeIdentifier(UUID.randomUUID().toString());
    }

    // Creates a node id with the given UUID; each of the five ports is drawn from nodePort.
    private NodeIdentifier createNodeIdentifier(final String uuid) {
        return new NodeIdentifier(uuid, "localhost", nodePort++, "localhost", nodePort++, "localhost", nodePort++,
            "localhost", nodePort++, nodePort++, true, Collections.emptySet());
    }

    @Test
    public void testFlowFileAvailability() {
        // Empty queue reports ACTIVE_QUEUE_EMPTY.
        assertTrue(queue.isEmpty());
        assertSame(FlowFileAvailability.ACTIVE_QUEUE_EMPTY, queue.getFlowFileAvailability());

        // A penalized FlowFile at the head makes the queue non-empty but unavailable.
        final MockFlowFileRecord penalizedFlowFile = new MockFlowFileRecord(0L);
        penalizedFlowFile.setPenaltyExpiration(System.currentTimeMillis() + 500_000L);
        queue.put(penalizedFlowFile);
        assertFalse(queue.isEmpty());
        assertSame(FlowFileAvailability.HEAD_OF_QUEUE_PENALIZED, queue.getFlowFileAvailability());

        // Once the penalty expires the FlowFile becomes available.
        penalizedFlowFile.setPenaltyExpiration(System.currentTimeMillis() - 1);
        assertFalse(queue.isEmpty());
        assertSame(FlowFileAvailability.FLOWFILE_AVAILABLE, queue.getFlowFileAvailability());
    }

    @Test
    public void testPriorities() {
        // Prioritizer that orders FlowFiles by their integer 'i' attribute, ascending.
        final FlowFilePrioritizer iValuePrioritizer = new FlowFilePrioritizer() {
            @Override
            public int compare(final FlowFile o1, final FlowFile o2) {
                final int i1 = Integer.parseInt(o1.getAttribute("i"));
                final int i2 = Integer.parseInt(o2.getAttribute("i"));
                return Integer.compare(i1, i2);
            }
        };

        queue.setPriorities(Collections.singletonList(iValuePrioritizer));

        final Map<String, String> attributes = new HashMap<>();

        // Add 100 FlowFiles, each with a descending 'i' value (first has i=99, second has i=98, etc.)
        for (int i = 99; i >= 0; i--) {
            attributes.put("i", String.valueOf(i));
            final MockFlowFileRecord flowFile = new MockFlowFileRecord(new HashMap<>(attributes), 0L);
            queue.put(flowFile);
        }

        // Polling must return them in prioritized (ascending) order despite descending insertion.
        for (int i=0; i < 100; i++) {
            final FlowFileRecord polled = queue.poll(Collections.emptySet());
            assertNotNull(polled);
            assertEquals(String.valueOf(i), polled.getAttribute("i"));
        }

        assertNull(queue.poll(Collections.emptySet()));
    }

    @Test
    public void testPrioritiesWhenSetBeforeLocalNodeIdDetermined() {
        final FlowFilePrioritizer iValuePrioritizer = new FlowFilePrioritizer() {
            @Override
            public int compare(final FlowFile o1, final FlowFile o2) {
                final int i1 = Integer.parseInt(o1.getAttribute("i"));
                final int i2 = Integer.parseInt(o2.getAttribute("i"));
                return Integer.compare(i1, i2);
            }
        };

        final ProcessScheduler scheduler = mock(ProcessScheduler.class);
        final AsyncLoadBalanceClientRegistry registry = mock(AsyncLoadBalanceClientRegistry.class);

        // Build a queue while the local node id is still unknown, set priorities, then supply node ids.
        when(clusterCoordinator.getLocalNodeIdentifier()).thenReturn(null);
        queue = new SocketLoadBalancedFlowFileQueue("unit-test", new NopConnectionEventListener(), scheduler, flowFileRepo, provRepo, contentRepo,
            claimManager, clusterCoordinator, registry, swapManager, 10000, eventReporter);

        queue.setPriorities(Collections.singletonList(iValuePrioritizer));

        when(clusterCoordinator.getLocalNodeIdentifier()).thenReturn(null);
        queue.setNodeIdentifiers(new HashSet<>(nodeIds), true);

        final Map<String, String> attributes = new HashMap<>();

        // Add 100 FlowFiles, each with a descending 'i' value (first has i=99, second has i=98, etc.)
        for (int i = 99; i >= 0; i--) {
            attributes.put("i", String.valueOf(i));
            final MockFlowFileRecord flowFile = new MockFlowFileRecord(new HashMap<>(attributes), 0L);
            queue.put(flowFile);
        }

        // Prioritizer set before the node ids were known must still be honored.
        for (int i=0; i < 100; i++) {
            final FlowFileRecord polled = queue.poll(Collections.emptySet());
            assertNotNull(polled);
            assertEquals(String.valueOf(i), polled.getAttribute("i"));
        }

        assertNull(queue.poll(Collections.emptySet()));
    }

    @Test
    public void testBinsAccordingToPartitioner() {
        // Every FlowFile must land in the partition the partitioner selects (always partition 1 here).
        final FlowFilePartitioner partitioner = new StaticFlowFilePartitioner(1);
        queue.setFlowFilePartitioner(partitioner);

        final QueuePartition desiredPartition = queue.getPartition(1);
        for (int i = 0; i < 100; i++) {
            final MockFlowFileRecord flowFile = new MockFlowFileRecord(0L);
            final QueuePartition partition = queue.putAndGetPartition(flowFile);
            assertSame(desiredPartition, partition);
        }
    }

    @Test
    public void testPutAllBinsFlowFilesSeparately() {
        // Partition data based on size. FlowFiles with 0 bytes will go to partition 0 (local partition),
        // FlowFiles with 1 byte will go to partition 1, and FlowFiles with 2 bytes will go to partition 2.
        final FlowFilePartitioner partitioner = new FlowFileSizePartitioner();
        queue.setFlowFilePartitioner(partitioner);

        // Add 3 FlowFiles for each size
        final List<FlowFileRecord> flowFiles = new ArrayList<>();
        for (int i = 0; i < 3; i++) {
            flowFiles.add(new MockFlowFileRecord(0));
            flowFiles.add(new MockFlowFileRecord(1));
            flowFiles.add(new MockFlowFileRecord(2));
        }

        final Map<QueuePartition, List<FlowFileRecord>> partitionMap = queue.putAllAndGetPartitions(flowFiles);
        assertEquals(3, partitionMap.size());

        // For each partition, get the List of FlowFiles added to it, then verify that there are 3 FlowFiles with that size.
        for (int i = 0; i < 3; i++) {
            final QueuePartition partition = queue.getPartition(i);
            final List<FlowFileRecord> flowFilesForPartition = partitionMap.get(partition);

            assertNotNull(flowFilesForPartition);
            assertEquals(3, flowFilesForPartition.size());

            for (final FlowFileRecord flowFile : flowFilesForPartition) {
                assertEquals(i, flowFile.getSize());
            }
        }
    }

    // Index of some partition that is NOT the local one (0 if local is elsewhere, else 1).
    private int determineRemotePartitionIndex() {
        final QueuePartition localPartition = queue.getLocalPartition();
        if (queue.getPartition(0) == localPartition) {
            return 1;
        } else {
            return 0;
        }
    }

    // Index of the local partition; partition order is not fixed, so scan for it.
    private int determineLocalPartitionIndex() {
        final QueuePartition localPartition = queue.getLocalPartition();
        for (int i=0; i < clusterCoordinator.getNodeIdentifiers().size(); i++) {
            if (queue.getPartition(i) == localPartition) {
                return i;
            }
        }

        throw new IllegalStateException("Could not determine local partition index");
    }

    @Test
    public void testIsEmptyWhenFlowFileInRemotePartition() {
        queue.setFlowFilePartitioner(new StaticFlowFilePartitioner(determineRemotePartitionIndex()));

        assertTrue(queue.isEmpty());
        assertTrue(queue.isActiveQueueEmpty());
        assertEquals(new QueueSize(0, 0L), queue.size());

        // A FlowFile bound for a remote partition counts toward size but is not locally pollable.
        queue.put(new MockFlowFileRecord(0L));
        assertFalse(queue.isEmpty());
        assertTrue(queue.isActiveQueueEmpty());
        assertEquals(new QueueSize(1, 0L), queue.size());

        assertNull(queue.poll(new HashSet<>()));
        assertFalse(queue.isEmpty());
        assertTrue(queue.isActiveQueueEmpty());
        assertEquals(new QueueSize(1, 0L), queue.size());
    }

    @Test
    public void testIsEmptyWhenFlowFileInLocalPartition() {
        queue.setFlowFilePartitioner(new StaticFlowFilePartitioner(determineLocalPartitionIndex()));

        // Ensure queue is empty
        assertTrue(queue.isEmpty());
        assertTrue(queue.isActiveQueueEmpty());
        assertEquals(new QueueSize(0, 0L), queue.size());

        // add a flowfile
        final FlowFileRecord flowFile = new MockFlowFileRecord(0L);
        queue.put(flowFile);
        assertFalse(queue.isEmpty());
        assertFalse(queue.isActiveQueueEmpty());
        assertEquals(new QueueSize(1, 0L), queue.size());

        // Ensure that we get the same FlowFile back. This will not decrement
        // the queue size, only acknowledging the FlowFile will do that.
        assertSame(flowFile, queue.poll(new HashSet<>()));
        assertFalse(queue.isEmpty());
        assertTrue(queue.isActiveQueueEmpty());
        assertEquals(new QueueSize(1, 0L), queue.size());

        // Acknowledging FlowFile should reduce queue size
        queue.acknowledge(flowFile);
        assertTrue(queue.isEmpty());
        assertTrue(queue.isActiveQueueEmpty());
        assertEquals(new QueueSize(0, 0L), queue.size());

        // Add FlowFile back in, poll it to ensure that we get it back, and
        // then acknowledge as a Collection and ensure the correct sizes.
        queue.put(flowFile);
        assertFalse(queue.isEmpty());
        assertFalse(queue.isActiveQueueEmpty());
        assertEquals(new QueueSize(1, 0L), queue.size());

        assertSame(flowFile, queue.poll(new HashSet<>()));
        assertFalse(queue.isEmpty());
        assertTrue(queue.isActiveQueueEmpty());
        assertEquals(new QueueSize(1, 0L), queue.size());

        queue.acknowledge(Collections.singleton(flowFile));
        assertTrue(queue.isEmpty());
        assertTrue(queue.isActiveQueueEmpty());
        assertEquals(new QueueSize(0, 0L), queue.size());
    }

    @Test
    public void testGetFlowFile() throws IOException {
        // Size-based partitioner: FlowFile size doubles as the target partition index here.
        queue.setFlowFilePartitioner(new FlowFileSizePartitioner());

        final Map<String, String> localAttributes = Collections.singletonMap("uuid", "local");
        final MockFlowFileRecord localFlowFile = new MockFlowFileRecord(localAttributes, determineLocalPartitionIndex());

        final Map<String, String> remoteAttributes = Collections.singletonMap("uuid", "remote");
        final MockFlowFileRecord remoteFlowFile = new MockFlowFileRecord(remoteAttributes, determineRemotePartitionIndex());

        queue.put(localFlowFile);
        queue.put(remoteFlowFile);

        // Only FlowFiles in the local partition are retrievable by uuid.
        assertSame(localFlowFile, queue.getFlowFile("local"));
        assertNull(queue.getFlowFile("remote"));
        assertNull(queue.getFlowFile("other"));
    }

    @Test
    public void testRecoverSwapFiles() throws IOException {
        long expectedMinLastQueueDate = Long.MAX_VALUE;
        long expectedTotalLastQueueDate = 0L;

        // Swap out 100 FlowFiles (100 bytes each) for each of the 3 node partitions...
        for (int partitionIndex = 0; partitionIndex < 3; partitionIndex++) {
            final String partitionName = queue.getPartition(partitionIndex).getSwapPartitionName();

            final List<FlowFileRecord> flowFiles = new ArrayList<>();
            for (int i = 0; i < 100; i++) {
                FlowFileRecord newMockFlowFilerecord = new MockFlowFileRecord(100L);
                flowFiles.add(newMockFlowFilerecord);
                expectedMinLastQueueDate = Long.min(expectedMinLastQueueDate, newMockFlowFilerecord.getLastQueueDate());
                expectedTotalLastQueueDate += newMockFlowFilerecord.getLastQueueDate();
            }

            swapManager.swapOut(flowFiles, queue, partitionName);
        }

        // ...plus 100 more in a partition name that no longer maps to a known node.
        final List<FlowFileRecord> flowFiles = new ArrayList<>();
        for (int i = 0; i < 100; i++) {
            FlowFileRecord newMockFlowFilerecord = new MockFlowFileRecord(100L);
            flowFiles.add(newMockFlowFilerecord);
            expectedMinLastQueueDate = Long.min(expectedMinLastQueueDate, newMockFlowFilerecord.getLastQueueDate());
            expectedTotalLastQueueDate += newMockFlowFilerecord.getLastQueueDate();
        }

        swapManager.swapOut(flowFiles, queue, "other-partition");

        // Recovery must account for all 400 FlowFiles, including those of the unknown partition.
        final SwapSummary swapSummary = queue.recoverSwappedFlowFiles();
        assertEquals(399L, swapSummary.getMaxFlowFileId().longValue());
        assertEquals(400, swapSummary.getQueueSize().getObjectCount());
        assertEquals(400 * 100L, swapSummary.getQueueSize().getByteCount());
        assertEquals(expectedTotalLastQueueDate, swapSummary.getTotalLastQueueDate().longValue());
        assertEquals(expectedMinLastQueueDate, swapSummary.getMinLastQueueDate().longValue());
    }

    @Test(timeout = 10000)
    public void testChangeInClusterTopologyTriggersRebalance() throws InterruptedException {
        // Create partitioner that sends first 2 FlowFiles to Partition 0, next 2 to Partition 1, and then next 4 to Partition 3.
        queue.setFlowFilePartitioner(new StaticSequencePartitioner(new int[] {0, 0, 1, 1, 3, 3, 3, 3}, true));

        for (int i = 0; i < 4; i++) {
            queue.put(new MockFlowFileRecord());
        }

        assertEquals(2, queue.getPartition(0).size().getObjectCount());
        assertEquals(2, queue.getPartition(1).size().getObjectCount());
        assertEquals(0, queue.getPartition(2).size().getObjectCount());

        final Set<NodeIdentifier> updatedNodeIdentifiers = new HashSet<>(nodeIds);

        // Add a Node Identifier with an of ID consisting of a bunch of Z's so that the new partition will be Partition Number 3.
        updatedNodeIdentifiers.add(new NodeIdentifier("ZZZZZZZZZZZZZZ", "localhost", nodePort++, "localhost", nodePort++, "localhost", nodePort++,
            "localhost", nodePort++, nodePort++, true, Collections.emptySet()));

        queue.setNodeIdentifiers(updatedNodeIdentifiers, false);

        // Rebalancing is asynchronous, so poll partition sizes until all 4 FlowFiles land in partition 3.
        final int[] expectedPartitionSizes = new int[] {0, 0, 0, 4};
        final int[] partitionSizes = new int[4];
        while (!Arrays.equals(expectedPartitionSizes, partitionSizes)) {
            Thread.sleep(10L);

            for (int i = 0; i < 4; i++) {
                partitionSizes[i] = queue.getPartition(i).size().getObjectCount();
            }
        }
    }

    @Test
    public void testOffloadAndReconnectKeepsQueueInCorrectOrder() {
        // Simulate FirstNodePartitioner, which always selects the first node in the partition queue
        queue.setFlowFilePartitioner(new StaticFlowFilePartitioner(0));

        QueuePartition firstPartition = queue.putAndGetPartition(new MockFlowFileRecord());
        final NodeIdentifier node1Identifier = nodeIds.get(0);
        final NodeIdentifier node2Identifier = nodeIds.get(1);

        // The local node partition starts out first
        Assert.assertEquals("local", firstPartition.getSwapPartitionName());

        // Simulate offloading the first node
        clusterTopologyEventListener.onNodeStateChange(node1Identifier, NodeConnectionState.OFFLOADING);

        // Now the remote partition for the second node should be returned
        firstPartition = queue.putAndGetPartition(new MockFlowFileRecord());
        Assert.assertEquals(node2Identifier, firstPartition.getNodeIdentifier().get());

        // Simulate reconnecting the first node
        clusterTopologyEventListener.onNodeStateChange(node1Identifier, NodeConnectionState.CONNECTED);

        // Now the local node partition is returned again
        firstPartition = queue.putAndGetPartition(new MockFlowFileRecord());
        Assert.assertEquals("local", firstPartition.getSwapPartitionName());
    }

    @Test(timeout = 30000)
    public void testChangeInClusterTopologyTriggersRebalanceOnlyOnRemovedNodeIfNecessary() throws InterruptedException {
        // Create partitioner that sends first 1 FlowFile to Partition 0, next to Partition 2, and then next 2 to Partition 2.
        // Then, cycle back to partitions 0 and 1. This will result in partitions 0 & 1 getting 1 FlowFile each and Partition 2
        // getting 2 FlowFiles. Then, when Partition 2 is removed, those 2 FlowFiles will be rebalanced to Partitions 0 and 1.
        queue.setFlowFilePartitioner(new StaticSequencePartitioner(new int[] {0, 1, 2, 2, 0, 1}, false));

        for (int i = 0; i < 4; i++) {
            queue.put(new MockFlowFileRecord());
        }

        assertEquals(1, queue.getPartition(0).size().getObjectCount());
        assertEquals(1, queue.getPartition(1).size().getObjectCount());
        assertEquals(2, queue.getPartition(2).size().getObjectCount());

        // Drop the third node; only its 2 FlowFiles should be redistributed.
        final Set<NodeIdentifier> updatedNodeIdentifiers = new HashSet<>();
        updatedNodeIdentifiers.add(nodeIds.get(0));
        updatedNodeIdentifiers.add(nodeIds.get(1));
        queue.setNodeIdentifiers(updatedNodeIdentifiers, false);

        final int[] expectedPartitionSizes = new int[] {2, 2};
        final int[] partitionSizes = new int[2];
        while (!Arrays.equals(expectedPartitionSizes, partitionSizes)) {
            Thread.sleep(10L);

            for (int i = 0; i < 2; i++) {
                partitionSizes[i] = queue.getPartition(i).size().getObjectCount();
            }
        }
    }

    @Test(timeout = 10000)
    public void testChangeInPartitionerTriggersRebalance() throws InterruptedException {
        // Create partitioner that sends first 2 FlowFiles to Partition 0, next 2 to Partition 1, and then next 4 to Partition 3.
        queue.setFlowFilePartitioner(new StaticSequencePartitioner(new int[] {0, 1, 0, 1}, false));

        for (int i = 0; i < 4; i++) {
            queue.put(new MockFlowFileRecord());
        }

        assertEquals(2, queue.getPartition(0).size().getObjectCount());
        assertEquals(2, queue.getPartition(1).size().getObjectCount());
        assertEquals(0, queue.getPartition(2).size().getObjectCount());

        // Installing a partitioner that requires rebalance must redistribute existing FlowFiles.
        queue.setFlowFilePartitioner(new StaticSequencePartitioner(new int[] {0, 1, 2, 2}, true));

        final int[] expectedPartitionSizes = new int[] {1, 1, 2};
        assertPartitionSizes(expectedPartitionSizes);
    }

    @Test(timeout = 10000)
    public void testDataInRemotePartitionForLocalIdIsMovedToLocalPartition() throws InterruptedException {
        // Rebuild the cluster with 3 fresh nodes and no known local node id.
        nodeIds.clear();

        final NodeIdentifier id1 = createNodeIdentifier();
        final NodeIdentifier id2 = createNodeIdentifier();
        final NodeIdentifier id3 = createNodeIdentifier();

        nodeIds.add(id1);
        nodeIds.add(id2);
        nodeIds.add(id3);

        when(clusterCoordinator.getLocalNodeIdentifier()).thenReturn(null);

        final AsyncLoadBalanceClientRegistry registry = mock(AsyncLoadBalanceClientRegistry.class);
        queue = new SocketLoadBalancedFlowFileQueue("unit-test", new NopConnectionEventListener(), mock(ProcessScheduler.class), flowFileRepo, provRepo,
            contentRepo, claimManager, clusterCoordinator, registry, swapManager, 10000, eventReporter);
        queue.setFlowFilePartitioner(new RoundRobinPartitioner());

        // Queue up data without knowing the local node id.
        final Map<String, String> attributes = new HashMap<>();
        for (int i=0; i < 6; i++) {
            attributes.put("i", String.valueOf(i));
            queue.put(new MockFlowFileRecord(attributes, 0));
        }

        // Announce the local node id; data queued for that id should move to the local partition.
        when(clusterCoordinator.getLocalNodeIdentifier()).thenReturn(id1);
        clusterTopologyEventListener.onLocalNodeIdentifierSet(id1);

        assertEquals(6, queue.size().getObjectCount());

        // Ensure that the partitions' object sizes add up to 6. This could take a short time because rebalancing will occur.
        // So we wait in a loop.
        while (true) {
            int totalObjectCount = 0;
            for (int i = 0; i < queue.getPartitionCount(); i++) {
                totalObjectCount += queue.getPartition(i).size().getObjectCount();
            }

            if (totalObjectCount == 6) {
                break;
            }
        }

        assertEquals(3, queue.getPartitionCount());
    }

    // Spins (with a short sleep) until each partition's object count matches expectedSizes;
    // relies on the caller's @Test(timeout=...) to bound the wait.
    private void assertPartitionSizes(final int[] expectedSizes) {
        final int[] partitionSizes = new int[queue.getPartitionCount()];

        while (!Arrays.equals(expectedSizes, partitionSizes)) {
            try {
                Thread.sleep(10L);
            } catch (InterruptedException e) {
                Assert.fail("Interrupted");
            }

            for (int i = 0; i < partitionSizes.length; i++) {
                partitionSizes[i] = queue.getPartition(i).size().getObjectCount();
            }
        }
    }

    // Partitioner that always chooses a single, fixed partition index.
    private static class StaticFlowFilePartitioner implements FlowFilePartitioner {
        private final int partitionIndex;

        public StaticFlowFilePartitioner(final int partition) {
            this.partitionIndex = partition;
        }

        @Override
        public QueuePartition getPartition(final FlowFileRecord flowFile, final QueuePartition[] partitions, final QueuePartition localPartition) {
            return partitions[partitionIndex];
        }

        @Override
        public boolean isRebalanceOnClusterResize() {
            return false;
        }

        @Override
        public boolean isRebalanceOnFailure() {
            return false;
        }
    }

    // Partitioner that uses the FlowFile's size (in bytes) as the partition index.
    private static class FlowFileSizePartitioner implements FlowFilePartitioner {
        @Override
        public QueuePartition getPartition(final FlowFileRecord flowFile, final QueuePartition[] partitions, final QueuePartition localPartition) {
            return partitions[(int) flowFile.getSize()];
        }

        @Override
        public boolean isRebalanceOnClusterResize() {
            return false;
        }

        @Override
        public boolean isRebalanceOnFailure() {
            return false;
        }
    }

    // Partitioner that walks a fixed sequence of partition indices, one step per getPartition() call.
    // NOTE(review): index is not bounds-checked; callers must not request more partitions than the
    // sequence length, and rebalancing may consume extra steps when requireRebalance is true.
    private static class StaticSequencePartitioner implements FlowFilePartitioner {
        private final int[] partitionIndices;
        private final boolean requireRebalance;
        private int index = 0;

        public StaticSequencePartitioner(final int[] partitions, final boolean requireRebalance) {
            this.partitionIndices = partitions;
            this.requireRebalance = requireRebalance;
        }

        @Override
        public QueuePartition getPartition(final FlowFileRecord flowFile, final QueuePartition[] partitions, final QueuePartition localPartition) {
            final int partitionIndex = partitionIndices[index++];
            return partitions[partitionIndex];
        }

        @Override
        public boolean isRebalanceOnClusterResize() {
            return requireRebalance;
        }

        @Override
        public boolean isRebalanceOnFailure() {
            return false;
        }
    }
}
package org.keycloak.connections.httpclient;

import org.apache.http.client.config.RequestConfig;
import org.apache.http.conn.ssl.AllowAllHostnameVerifier;
import org.apache.http.conn.ssl.BrowserCompatHostnameVerifier;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.conn.ssl.SSLContexts;
import org.apache.http.conn.ssl.StrictHostnameVerifier;
import org.apache.http.conn.ssl.X509HostnameVerifier;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import java.io.IOException;
import java.security.KeyManagementException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.concurrent.TimeUnit;

/**
 * Abstraction for creating HttpClients. Allows SSL configuration.
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class HttpClientBuilder {

    /**
     * Policy used to verify the hostname against the server certificate.
     */
    public static enum HostnameVerificationPolicy {
        /**
         * Hostname verification is not done on the server's certificate
         */
        ANY,
        /**
         * Allows wildcards in subdomain names i.e. *.foo.com
         */
        WILDCARD,
        /**
         * CN must match hostname connecting to
         */
        STRICT
    }

    /**
     * Trust manager that accepts any certificate chain. Only installed when
     * {@link #disableTrustManager()} has been called.
     *
     * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
     * @version $Revision: 1 $
     */
    private static class PassthroughTrustManager implements X509TrustManager {
        public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
        }

        public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
        }

        public X509Certificate[] getAcceptedIssuers() {
            // The X509TrustManager contract requires a non-null (possibly empty) array;
            // returning null can cause NPEs in callers that iterate the issuers.
            return new X509Certificate[0];
        }
    }

    protected KeyStore truststore;
    protected KeyStore clientKeyStore;
    protected String clientPrivateKeyPassword;
    protected boolean disableTrustManager;
    protected HostnameVerificationPolicy policy = HostnameVerificationPolicy.WILDCARD;
    protected SSLContext sslContext;
    protected int connectionPoolSize = 100;
    protected int maxPooledPerRoute = 0;
    protected long connectionTTL = -1;
    protected TimeUnit connectionTTLUnit = TimeUnit.MILLISECONDS;
    protected HostnameVerifier verifier = null;
    protected long socketTimeout = -1;                 // -1 means "not configured"
    protected TimeUnit socketTimeoutUnits = TimeUnit.MILLISECONDS;
    protected long establishConnectionTimeout = -1;    // -1 means "not configured"
    protected TimeUnit establishConnectionTimeoutUnits = TimeUnit.MILLISECONDS;
    protected boolean disableCookies = false;

    /**
     * Socket inactivity timeout
     *
     * @param timeout timeout value; a negative value means "use the default"
     * @param unit unit of {@code timeout}
     * @return this builder
     */
    public HttpClientBuilder socketTimeout(long timeout, TimeUnit unit) {
        this.socketTimeout = timeout;
        this.socketTimeoutUnits = unit;
        return this;
    }

    /**
     * When trying to make an initial socket connection, what is the timeout?
     *
     * @param timeout timeout value; a negative value means "use the default"
     * @param unit unit of {@code timeout}
     * @return this builder
     */
    public HttpClientBuilder establishConnectionTimeout(long timeout, TimeUnit unit) {
        this.establishConnectionTimeout = timeout;
        this.establishConnectionTimeoutUnits = unit;
        return this;
    }

    /**
     * Time-to-live for pooled connections.
     * NOTE(review): this value is stored but not currently applied in {@link #build()}.
     *
     * @param ttl time-to-live value
     * @param unit unit of {@code ttl}
     * @return this builder
     */
    public HttpClientBuilder connectionTTL(long ttl, TimeUnit unit) {
        this.connectionTTL = ttl;
        this.connectionTTLUnit = unit;
        return this;
    }

    /**
     * Maximum number of pooled connections per route.
     */
    public HttpClientBuilder maxPooledPerRoute(int maxPooledPerRoute) {
        this.maxPooledPerRoute = maxPooledPerRoute;
        return this;
    }

    /**
     * Maximum total number of pooled connections.
     */
    public HttpClientBuilder connectionPoolSize(int connectionPoolSize) {
        this.connectionPoolSize = connectionPoolSize;
        return this;
    }

    /**
     * Disable trust management and hostname verification. <i>NOTE</i> this is a security
     * hole, so only set this option if you cannot or do not want to verify the identity of the
     * host you are communicating with.
     */
    public HttpClientBuilder disableTrustManager() {
        this.disableTrustManager = true;
        return this;
    }

    /**
     * Disable cookie management.
     *
     * @param disable {@code true} to disable cookie management on the built client
     */
    public HttpClientBuilder disableCookies(boolean disable) {
        // BUG FIX: previously assigned this.disableTrustManager, which silently disabled
        // certificate trust checks instead of cookie management.
        this.disableCookies = disable;
        return this;
    }

    /**
     * SSL policy used to verify hostnames
     *
     * @param policy the verification policy
     * @return this builder
     */
    public HttpClientBuilder hostnameVerification(HostnameVerificationPolicy policy) {
        this.policy = policy;
        return this;
    }

    public HttpClientBuilder sslContext(SSLContext sslContext) {
        this.sslContext = sslContext;
        return this;
    }

    public HttpClientBuilder trustStore(KeyStore truststore) {
        this.truststore = truststore;
        return this;
    }

    public HttpClientBuilder keyStore(KeyStore keyStore, String password) {
        this.clientKeyStore = keyStore;
        this.clientPrivateKeyPassword = password;
        return this;
    }

    public HttpClientBuilder keyStore(KeyStore keyStore, char[] password) {
        this.clientKeyStore = keyStore;
        this.clientPrivateKeyPassword = new String(password);
        return this;
    }

    /**
     * Adapts a plain {@link HostnameVerifier} to Apache HttpClient's {@link X509HostnameVerifier}.
     * Only the session-based verification paths are supported.
     */
    static class VerifierWrapper implements X509HostnameVerifier {
        protected HostnameVerifier verifier;

        VerifierWrapper(HostnameVerifier verifier) {
            this.verifier = verifier;
        }

        @Override
        public void verify(String host, SSLSocket ssl) throws IOException {
            if (!verifier.verify(host, ssl.getSession()))
                throw new SSLException("Hostname verification failure");
        }

        @Override
        public void verify(String host, X509Certificate cert) throws SSLException {
            throw new SSLException("This verification path not implemented");
        }

        @Override
        public void verify(String host, String[] cns, String[] subjectAlts) throws SSLException {
            throw new SSLException("This verification path not implemented");
        }

        @Override
        public boolean verify(String s, SSLSession sslSession) {
            return verifier.verify(s, sslSession);
        }
    }

    /**
     * Builds the configured {@link CloseableHttpClient}.
     * <p>
     * SSL setup precedence: an explicitly disabled trust manager wins, then an explicit
     * {@link SSLContext}, then a configured key/trust store, and finally a default TLS context.
     *
     * @return a new client; the caller is responsible for closing it
     * @throws RuntimeException wrapping any SSL/keystore initialization failure
     */
    public CloseableHttpClient build() {
        X509HostnameVerifier verifier = null;
        if (this.verifier != null) verifier = new VerifierWrapper(this.verifier);
        else {
            switch (policy) {
                case ANY:
                    verifier = new AllowAllHostnameVerifier();
                    break;
                case WILDCARD:
                    verifier = new BrowserCompatHostnameVerifier();
                    break;
                case STRICT:
                    verifier = new StrictHostnameVerifier();
                    break;
            }
        }
        try {
            SSLConnectionSocketFactory sslsf = null;
            SSLContext theContext = sslContext;
            if (disableTrustManager) {
                theContext = SSLContext.getInstance("TLS");
                theContext.init(null, new TrustManager[]{new PassthroughTrustManager()}, new SecureRandom());
                verifier = new AllowAllHostnameVerifier();
                sslsf = new SSLConnectionSocketFactory(theContext, verifier);
            } else if (theContext != null) {
                sslsf = new SSLConnectionSocketFactory(theContext, verifier);
            } else if (clientKeyStore != null || truststore != null) {
                theContext = createSslContext("TLS", clientKeyStore, clientPrivateKeyPassword, truststore, null);
                sslsf = new SSLConnectionSocketFactory(theContext, verifier);
            } else {
                final SSLContext tlsContext = SSLContext.getInstance("TLS");
                tlsContext.init(null, null, null);
                sslsf = new SSLConnectionSocketFactory(tlsContext, verifier);
            }

            // BUG FIX: the configured TimeUnit fields were previously ignored; the raw long values
            // were cast to int and treated as milliseconds regardless of the unit supplied.
            RequestConfig requestConfig = RequestConfig.custom()
                    .setConnectTimeout(toTimeoutMillis(establishConnectionTimeout, establishConnectionTimeoutUnits))
                    .setSocketTimeout(toTimeoutMillis(socketTimeout, socketTimeoutUnits))
                    .build();

            org.apache.http.impl.client.HttpClientBuilder builder = HttpClients.custom()
                    .setDefaultRequestConfig(requestConfig)
                    .setSSLSocketFactory(sslsf)
                    .setMaxConnTotal(connectionPoolSize)
                    .setMaxConnPerRoute(maxPooledPerRoute);

            if (disableCookies) builder.disableCookieManagement();

            return builder.build();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    // Converts a (value, unit) timeout pair to the milliseconds expected by RequestConfig,
    // preserving the -1 "not configured" sentinel (RequestConfig treats negatives as default).
    private static int toTimeoutMillis(long timeout, TimeUnit unit) {
        return timeout < 0 ? -1 : (int) unit.toMillis(timeout);
    }

    /**
     * Creates an SSLContext from the given key material and/or trust store.
     *
     * @param algorithm   TLS protocol name, e.g. "TLS"
     * @param keystore    client key store, may be {@code null}
     * @param keyPassword private key password, may be {@code null}
     * @param truststore  trust store, may be {@code null}
     * @param random      source of randomness, may be {@code null} for the default
     */
    private SSLContext createSslContext(
            final String algorithm,
            final KeyStore keystore,
            final String keyPassword,
            final KeyStore truststore,
            final SecureRandom random)
            throws NoSuchAlgorithmException, KeyManagementException, KeyStoreException, UnrecoverableKeyException {
        return SSLContexts.custom()
                .useProtocol(algorithm)
                .setSecureRandom(random)
                .loadKeyMaterial(keystore, keyPassword != null ? keyPassword.toCharArray() : null)
                .loadTrustMaterial(truststore)
                .build();
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sshd.common.session; import java.io.IOException; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; import org.apache.mina.core.buffer.IoBuffer; import org.apache.mina.core.future.IoFuture; import org.apache.mina.core.future.IoFutureListener; import org.apache.mina.core.future.WriteFuture; import org.apache.mina.core.session.IoSession; import org.apache.sshd.client.channel.AbstractClientChannel; import org.apache.sshd.common.Channel; import org.apache.sshd.common.Cipher; import org.apache.sshd.common.Compression; import org.apache.sshd.common.Digest; import org.apache.sshd.common.FactoryManager; import org.apache.sshd.common.KeyExchange; import org.apache.sshd.common.Mac; import org.apache.sshd.common.NamedFactory; import org.apache.sshd.common.Random; import org.apache.sshd.common.Session; import org.apache.sshd.common.SshConstants; import org.apache.sshd.common.SshException; import org.apache.sshd.common.future.CloseFuture; import org.apache.sshd.common.future.DefaultCloseFuture; import org.apache.sshd.common.future.SshFuture; import org.apache.sshd.common.future.SshFutureListener; import 
org.apache.sshd.common.util.Buffer;
import org.apache.sshd.common.util.BufferUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.sshd.common.util.LogUtils;

/**
 * The AbstractSession handles all the basic SSH protocol such as key exchange, authentication,
 * encoding and decoding. Both server side and client side sessions should inherit from this
 * abstract class. Some basic packet processing methods are defined but the actual call to these
 * methods should be done from the {@link #handleMessage(org.apache.sshd.common.util.Buffer)}
 * method, which is dependent on the state and side of this session.
 *
 * TODO: if there is any very big packet, decoderBuffer and uncompressBuffer will get quite big
 *       and they won't be resized down at any time. Though the packet size is really limited
 *       by the channel max packet size
 *
 * @author <a href="mailto:dev@mina.apache.org">Apache MINA SSHD Project</a>
 */
public abstract class AbstractSession implements Session {

    /**
     * Name of the property where this session is stored in the attributes of the
     * underlying MINA session. See {@link #getSession(IoSession, boolean)}
     * and {@link #attachSession(IoSession, AbstractSession)}.
     */
    public static final String SESSION = "org.apache.sshd.session";

    /** Our Log */
    protected final Log log = LogFactory.getLog(getClass());
    /** The factory manager used to retrieve factories of Ciphers, Macs and other objects */
    protected final FactoryManager factoryManager;
    /** The underlying MINA session */
    protected final IoSession ioSession;
    /** The pseudo random generator */
    protected final Random random;
    /** Lock object for this session state */
    protected final Object lock = new Object();
    /**
     * A future that will be set 'closed' when the connection is closed.
     */
    protected final CloseFuture closeFuture = new DefaultCloseFuture(lock);
    /** The session is being closed */
    protected volatile boolean closing;
    /** Boolean indicating if this session has been authenticated or not */
    protected boolean authed;
    /** Map of channels keyed by the identifier */
    protected final Map<Integer, Channel> channels = new ConcurrentHashMap<Integer, Channel>();
    /** Next channel identifier */
    protected int nextChannelId;

    //
    // Key exchange support
    //
    protected byte[] sessionId;
    protected String serverVersion;
    protected String clientVersion;
    protected String[] serverProposal;
    protected String[] clientProposal;
    // [sic] "negociated" — negotiated algorithm names, filled by negociate()
    protected String[] negociated;
    protected byte[] I_C; // the payload of the client's SSH_MSG_KEXINIT
    protected byte[] I_S; // the payload of the factoryManager's SSH_MSG_KEXINIT
    protected KeyExchange kex;

    //
    // SSH packets encoding / decoding support
    //
    protected Cipher outCipher;
    protected Cipher inCipher;
    // Cipher sizes start at 8 and are updated from the negotiated ciphers in receiveNewKeys()
    protected int outCipherSize = 8;
    protected int inCipherSize = 8;
    protected Mac outMac;
    protected Mac inMac;
    protected byte[] inMacResult;
    protected Compression outCompression;
    protected Compression inCompression;
    protected long seqi; // incoming packet sequence number, wraps at 2^32 (see decode())
    protected long seqo; // outgoing packet sequence number, wraps at 2^32 (see encode())
    protected Buffer decoderBuffer = new Buffer();
    protected Buffer uncompressBuffer;
    // Decoder state machine: 0 = waiting for the packet length, 1 = waiting for the packet body
    protected int decoderState;
    protected int decoderLength;
    protected final Object encodeLock = new Object();
    protected final Object decodeLock = new Object();

    protected final Map<AttributeKey<?>, Object> attributes = new ConcurrentHashMap<AttributeKey<?>, Object>();

    /**
     * Create a new session.
     *
     * @param factoryManager the factory manager
     * @param ioSession the underlying MINA session
     */
    public AbstractSession(FactoryManager factoryManager, IoSession ioSession) {
        this.factoryManager = factoryManager;
        this.ioSession = ioSession;
        this.random = factoryManager.getRandomFactory().create();
    }

    /**
     * Retrieve the session from the MINA session.
 * If the session has not been attached, an IllegalStateException
 * will be thrown
 *
 * @param ioSession the MINA session
 * @return the session attached to the MINA session
 */
public static final AbstractSession getSession(IoSession ioSession) {
    return getSession(ioSession, false);
}

/**
 * Retrieve the session from the MINA session.
 * If the session has not been attached and allowNull is <code>false</code>,
 * an IllegalStateException will be thrown, else a <code>null</code> will
 * be returned
 *
 * @param ioSession the MINA session
 * @param allowNull if <code>true</code>, a <code>null</code> value may be
 *        returned if no session is attached
 * @return the session attached to the MINA session or <code>null</code>
 */
public static final AbstractSession getSession(IoSession ioSession, boolean allowNull) {
    AbstractSession session = (AbstractSession) ioSession.getAttribute(SESSION);
    if (!allowNull && session == null) {
        throw new IllegalStateException("No session available");
    }
    return session;
}

/**
 * Attach a session to the MINA session
 *
 * @param ioSession the MINA session
 * @param session the session to attach
 */
public static final void attachSession(IoSession ioSession, AbstractSession session) {
    ioSession.setAttribute(SESSION, session);
}

/**
 * Retrieve the mina session
 *
 * @return the mina session
 */
public IoSession getIoSession() {
    return ioSession;
}

/**
 * Retrieve the factory manager
 *
 * @return the factory manager for this session
 */
public FactoryManager getFactoryManager() {
    return factoryManager;
}

/**
 * Main input point for the MINA framework.
 *
 * This method will be called each time new data is received on
 * the socket and will append it to the input buffer before
 * calling the {@link #decode()} method.
 *
 * @param buffer the new buffer received
 * @throws Exception if an error occurs while decoding or handling the data
 */
public void messageReceived(IoBuffer buffer) throws Exception {
    synchronized (decodeLock) {
        decoderBuffer.putBuffer(buffer);
        // One of those properties will be set by the constructor and the other
        // one should be set by the readIdentification method; until both are
        // known we are still exchanging the plain-text identification strings.
        if (clientVersion == null || serverVersion == null) {
            if (readIdentification(decoderBuffer)) {
                decoderBuffer.compact();
            } else {
                return; // not enough data for a full identification line yet
            }
        }
        decode();
    }
}

/**
 * Abstract method for processing incoming decoded packets.
 * The given buffer will hold the decoded packet, starting from
 * the command byte at the read position.
 * Packets must be processed within this call or be copied because
 * the given buffer is meant to be changed and updated when this
 * method returns.
 *
 * @param buffer the buffer containing the packet
 * @throws Exception if an exception occurs while handling this packet.
 */
protected abstract void handleMessage(Buffer buffer) throws Exception;

/**
 * Handle any exceptions that occurred on this session.
 * The session will be closed and a disconnect packet will be
 * sent before if the given exception is an
 * {@link org.apache.sshd.common.SshException}.
 *
 * @param t the exception to process
 */
public void exceptionCaught(Throwable t) {
    log.warn("Exception caught", t);
    try {
        if (t instanceof SshException) {
            int code = ((SshException) t).getDisconnectCode();
            if (code > 0) {
                // disconnect() closes the session asynchronously once the packet is sent
                disconnect(code, t.getMessage());
                return;
            }
        }
    } catch (Throwable t2) {
        // Ignored: we are already failing, fall through to a forced close
    }
    close(true);
}

/**
 * Close this session.
 * This method will close all channels, then close the underlying MINA session.
 * The call will not block until the mina session is actually closed.
 */
public CloseFuture close(final boolean immediately) {
    // Listener that fires once the underlying MINA session has fully closed:
    // it completes closeFuture and wakes any thread waiting on the session lock.
    class IoSessionCloser implements IoFutureListener {
        public void operationComplete(IoFuture future) {
            synchronized (lock) {
                log.debug("IoSession closed");
                closeFuture.setClosed();
                lock.notifyAll();
            }
        }
    };
    synchronized (lock) {
        if (!closing) {
            try {
                closing = true;
                log.info("Closing session");
                Channel[] channelToClose = channels.values().toArray(new Channel[0]);
                if (channelToClose.length > 0) {
                    // Close the IoSession only after every channel close has completed;
                    // the atomic countdown tracks the remaining channel close futures.
                    final AtomicInteger latch = new AtomicInteger(channelToClose.length);
                    for (Channel channel : channelToClose) {
                        LogUtils.debug(log, "Closing channel {0}", channel.getId());
                        channel.close(immediately).addListener(new SshFutureListener() {
                            public void operationComplete(SshFuture sshFuture) {
                                if (latch.decrementAndGet() == 0) {
                                    log.debug("Closing IoSession");
                                    ioSession.close(true).addListener(new IoSessionCloser());
                                }
                            }
                        });
                    }
                } else {
                    // No channels: close the IoSession directly
                    log.debug("Closing IoSession");
                    ioSession.close(immediately).addListener(new IoSessionCloser());
                }
            } catch (Throwable t) {
                log.warn("Error closing session", t);
            }
        }
        return closeFuture;
    }
}

/**
 * Encode and send the given buffer.
 * The buffer has to have 5 bytes free at the beginning to allow the encoding to take place.
 * Also, the write position of the buffer has to be set to the position of the last byte to write.
 *
 * @param buffer the buffer to encode and send
 * @return a future that can be used to check when the packet has actually been sent
 * @throws java.io.IOException if an error occurred when encoding or sending the packet
 */
public WriteFuture writePacket(Buffer buffer) throws IOException {
    // Synchronize all write requests as needed by the encoding algorithm
    // and also queue the write request in this synchronized block to ensure
    // packets are sent in the correct order
    synchronized (encodeLock) {
        encode(buffer);
        IoBuffer bb = IoBuffer.wrap(buffer.array(), buffer.rpos(), buffer.available());
        return ioSession.write(bb);
    }
}

/**
 * Create a new buffer for the specified SSH packet and reserve the needed space
 * (5 bytes) for the packet header.
 *
 * @param cmd the SSH command
 * @param len estimated number of bytes the buffer will hold, 0 if unknown.
 * @return a new buffer ready for write
 */
public Buffer createBuffer(SshConstants.Message cmd, int len) {
    Buffer buffer;
    if (len <= 0) {
        buffer = new Buffer();
    } else {
        // Since the caller claims to know how many bytes they will need
        // increase their request to account for our headers/footers if
        // they actually send exactly this amount.
        // (This mirrors the length/padding math in encode().)
        int bsize = outCipherSize;
        int oldLen = len; // NOTE(review): unused here, kept for symmetry with encode()
        len += 5;
        int pad = (-len) & (bsize - 1); // assumes bsize is a power of two
        if (pad < bsize) {
            pad += bsize;
        }
        len = len + pad - 4;
        if (outMac != null) {
            len += outMac.getBlockSize();
        }
        buffer = new Buffer(new byte[Math.max(len, Buffer.DEFAULT_SIZE)], false);
    }
    buffer.rpos(5);
    buffer.wpos(5);
    buffer.putByte(cmd.toByte());
    return buffer;
}

/**
 * Encode a buffer into the SSH protocol.
 * This method needs to be called from within a synchronized block on encodeLock.
 *
 * @param buffer the buffer to encode
 * @throws IOException if an exception occurs during the encoding process
 */
private void encode(Buffer buffer) throws IOException {
    try {
        // Check that the packet has some free space for the header
        if (buffer.rpos() < 5) {
            log.warn("Performance cost: when sending a packet, ensure that " + "5 bytes are available in front of the buffer");
            Buffer nb = new Buffer();
            nb.wpos(5);
            nb.putBuffer(buffer);
            buffer = nb;
        }
        // Grab the length of the packet (excluding the 5 header bytes)
        int len = buffer.available();
        int off = buffer.rpos() - 5;
        // Debug log the packet
        if (log.isTraceEnabled()) {
            log.trace("Sending packet #"+seqo+": "+buffer.printHex());
        }
        // Compress the packet if needed (delayed compression only starts after auth)
        if (outCompression != null && (authed || !outCompression.isDelayed())) {
            outCompression.compress(buffer);
            len = buffer.available();
        }
        // Compute padding length so the full packet fills whole cipher blocks
        int bsize = outCipherSize;
        int oldLen = len;
        len += 5;
        int pad = (-len) & (bsize - 1); // assumes bsize is a power of two
        if (pad < bsize) {
            pad += bsize;
        }
        len = len + pad - 4;
        // Write 5 header bytes: 4-byte packet length + 1-byte padding length
        buffer.wpos(off);
        buffer.putInt(len);
        buffer.putByte((byte) pad);
        // Fill padding with random bytes
        buffer.wpos(off + oldLen + 5 + pad);
        random.fill(buffer.array(), buffer.wpos() - pad, pad);
        // Compute mac over (sequence number || unencrypted packet) and append it
        if (outMac != null) {
            int macSize = outMac.getBlockSize();
            int l = buffer.wpos();
            buffer.wpos(l + macSize);
            outMac.updateUInt(seqo);
            outMac.update(buffer.array(), off, l);
            outMac.doFinal(buffer.array(), l);
        }
        // Encrypt packet, excluding mac
        if (outCipher != null) {
            outCipher.update(buffer.array(), off, len + 4);
        }
        // Increment packet id, wrapping at 2^32
        seqo = (seqo + 1) & 0xffffffffL;
        // Make buffer ready to be read
        buffer.rpos(off);
    } catch (SshException e) {
        throw e;
    } catch (Exception e) {
        throw new SshException(e);
    }
}

/**
 * Decode the incoming buffer and handle packets as needed.
 *
 * Runs a two-state machine over decoderBuffer: state 0 reads and validates the
 * 4-byte packet length (decrypting the first cipher block if needed); state 1
 * waits for the full packet plus MAC, verifies the MAC, decompresses, and
 * dispatches the payload to {@link #handleMessage(Buffer)}.
 *
 * @throws Exception if decoding or packet handling fails
 */
protected void decode() throws Exception {
    // Decoding loop
    for (;;) {
        // Wait for beginning of packet
        if (decoderState == 0) {
            // The read position should always be 0 at this point because we have compacted this buffer
            assert decoderBuffer.rpos() == 0;
            // If we have received enough bytes, start processing those
            if (decoderBuffer.available() > inCipherSize) {
                // Decrypt the first bytes
                if (inCipher != null) {
                    inCipher.update(decoderBuffer.array(), 0, inCipherSize);
                }
                // Read packet length
                decoderLength = decoderBuffer.getInt();
                // Check packet length validity
                if (decoderLength < 5 || decoderLength > (256 * 1024)) {
                    LogUtils.info(log,"Error decoding packet (invalid length) {0}", decoderBuffer.printHex());
                    throw new SshException(SshConstants.SSH2_DISCONNECT_PROTOCOL_ERROR,
                                           "Invalid packet length: " + decoderLength);
                }
                // Ok, that's good, we can go to the next step
                decoderState = 1;
            } else {
                // need more data
                break;
            }
        // We have received the beginning of the packet
        } else if (decoderState == 1) {
            // The read position should always be 4 at this point
            assert decoderBuffer.rpos() == 4;
            int macSize = inMac != null ? inMac.getBlockSize() : 0;
            // Check if the packet has been fully received
            if (decoderBuffer.available() >= decoderLength + macSize) {
                byte[] data = decoderBuffer.array();
                // Decrypt the remaining of the packet
                if (inCipher != null){
                    inCipher.update(data, inCipherSize, decoderLength + 4 - inCipherSize);
                }
                // Check the mac of the packet
                if (inMac != null) {
                    // Update mac with packet id
                    inMac.updateUInt(seqi);
                    // Update mac with packet data
                    inMac.update(data, 0, decoderLength + 4);
                    // Compute mac result
                    inMac.doFinal(inMacResult, 0);
                    // Check the computed result with the received mac (just after the packet data)
                    if (!BufferUtils.equals(inMacResult, 0, data, decoderLength + 4, macSize)) {
                        throw new SshException(SshConstants.SSH2_DISCONNECT_MAC_ERROR, "MAC Error");
                    }
                }
                // Increment incoming packet sequence number, wrapping at 2^32
                seqi = (seqi + 1) & 0xffffffffL;
                // Get padding
                byte pad = decoderBuffer.getByte();
                Buffer buf;
                int wpos = decoderBuffer.wpos();
                // Decompress if needed (delayed compression only starts after auth)
                if (inCompression != null && (authed || !inCompression.isDelayed())) {
                    if (uncompressBuffer == null) {
                        uncompressBuffer = new Buffer();
                    } else {
                        uncompressBuffer.clear();
                    }
                    decoderBuffer.wpos(decoderBuffer.rpos() + decoderLength - 1 - pad);
                    inCompression.uncompress(decoderBuffer, uncompressBuffer);
                    buf = uncompressBuffer;
                } else {
                    decoderBuffer.wpos(decoderLength + 4 - pad);
                    buf = decoderBuffer;
                }
                if (log.isTraceEnabled()) {
                    log.trace("Received packet #"+seqi+": "+ buf.printHex());
                }
                // Process decoded packet
                handleMessage(buf);
                // Set ready to handle next packet
                decoderBuffer.rpos(decoderLength + 4 + macSize);
                decoderBuffer.wpos(wpos);
                decoderBuffer.compact();
                decoderState = 0;
            } else {
                // need more data
                break;
            }
        }
    }
}

/**
 * Send our identification.
 *
 * @param ident our identification to send
 */
protected void sendIdentification(String ident) {
    IoBuffer buffer = IoBuffer.allocate(32);
    buffer.setAutoExpand(true);
    // Identification lines are terminated by CR LF per the SSH protocol
    buffer.put((ident + "\r\n").getBytes());
    buffer.flip();
    ioSession.write(buffer);
}

/**
 * Read the other side identification.
 * This method is specific to the client or server side, but both should call
 * {@link #doReadIdentification(org.apache.sshd.common.util.Buffer)} and
 * store the result in the needed property.
 *
 * @param buffer the buffer containing the remote identification
 * @return <code>true</code> if the identification has been fully read or
 *         <code>false</code> if more data is needed
 * @throws IOException if an error occurs such as a bad protocol version
 */
protected abstract boolean readIdentification(Buffer buffer) throws IOException;

/**
 * Read the remote identification from this buffer.
 * If more data is needed, the buffer will be reset to its original state
 * and a <code>null</code> value will be returned. Else the identification
 * string will be returned and the data read will be consumed from the buffer.
 *
 * @param buffer the buffer containing the identification string
 * @return the remote identification or <code>null</code> if more data is needed
 */
protected String doReadIdentification(Buffer buffer) {
    byte[] data = new byte[256];
    // Outer loop: skip any header lines preceding the "SSH-" identification line
    for (;;) {
        int rpos = buffer.rpos();
        int pos = 0;
        boolean needLf = false;
        // Inner loop: read one line, accepting LF or CR LF endings
        for (;;) {
            if (buffer.available() == 0) {
                // Need more data, so undo reading and return null
                buffer.rpos(rpos);
                return null;
            }
            byte b = buffer.getByte();
            if (b == '\r') {
                needLf = true;
                continue;
            }
            if (b == '\n') {
                break;
            }
            if (needLf) {
                // CR must be immediately followed by LF
                throw new IllegalStateException("Incorrect identification: bad line ending");
            }
            if (pos >= data.length) {
                throw new IllegalStateException("Incorrect identification: line too long");
            }
            data[pos++] = b;
        }
        String str = new String(data, 0, pos);
        if (str.startsWith("SSH-")) {
            return str;
        }
        if (buffer.rpos() > 16 * 1024) {
            throw new IllegalStateException("Incorrect identification: too many header lines");
        }
    }
}

/**
 * Create our proposal for SSH negotiation.
 * Cipher, MAC and compression names are listed twice on purpose: once for the
 * client-to-server direction and once for server-to-client. The two trailing
 * empty strings are the language proposals.
 *
 * @param hostKeyTypes the list of supported host key types
 * @return an array of 10 strings holding this proposal
 */
protected String[] createProposal(String hostKeyTypes) {
    return new String[] {
            NamedFactory.Utils.getNames(factoryManager.getKeyExchangeFactories()),
            hostKeyTypes,
            NamedFactory.Utils.getNames(factoryManager.getCipherFactories()),
            NamedFactory.Utils.getNames(factoryManager.getCipherFactories()),
            NamedFactory.Utils.getNames(factoryManager.getMacFactories()),
            NamedFactory.Utils.getNames(factoryManager.getMacFactories()),
            NamedFactory.Utils.getNames(factoryManager.getCompressionFactories()),
            NamedFactory.Utils.getNames(factoryManager.getCompressionFactories()),
            "",
            ""
    };
}

/**
 * Send the key exchange initialization packet.
 * This packet contains random data along with our proposal.
 *
 * @param proposal our proposal for key exchange negotiation
 * @return the sent packet which must be kept for later use
 * @throws IOException if an error occurred sending the packet
 */
protected byte[] sendKexInit(String[] proposal) throws IOException {
    Buffer buffer = createBuffer(SshConstants.Message.SSH_MSG_KEXINIT, 0);
    int p = buffer.wpos();
    // 16 bytes of random cookie
    buffer.wpos(p + 16);
    random.fill(buffer.array(), p, 16);
    for (String s : proposal) {
        buffer.putString(s);
    }
    // first_kex_packet_follows flag + reserved uint32
    buffer.putByte((byte) 0);
    buffer.putInt(0);
    byte[] data = buffer.getCompactData();
    writePacket(buffer);
    return data;
}

/**
 * Receive the remote key exchange init message.
 * The packet data is returned for later use.
 *
 * @param buffer the buffer containing the key exchange init packet
 * @param proposal the remote proposal to fill
 * @return the packet data
 */
protected byte[] receiveKexInit(Buffer buffer, String[] proposal) {
    // Recreate the packet payload which will be needed at a later time
    // (size starts at 22 = 1 command byte + 16 cookie bytes + 1 flag byte + 4 reserved bytes)
    int size = 22;
    byte[] d = buffer.array();
    byte[] data = new byte[buffer.available() + 1];
    data[0] = SshConstants.Message.SSH_MSG_KEXINIT.toByte();
    System.arraycopy(d, buffer.rpos(), data, 1, data.length - 1);
    // Skip 16 bytes of random data
    buffer.rpos(buffer.rpos() + 16);
    // Read proposal
    for (int i = 0; i < proposal.length; i++) {
        size += 4;
        proposal[i] = buffer.getString();
        size += proposal[i].length();
    }
    // Skip 5 bytes (first_kex_packet_follows flag + reserved uint32)
    buffer.getByte();
    buffer.getInt();
    // Return data trimmed to the actual payload size
    byte[] dataShrinked = new byte[size];
    System.arraycopy(data, 0, dataShrinked, 0, size);
    return dataShrinked;
}

/**
 * Send a message to put new keys into use.
 *
 * @throws IOException if an error occurs sending the message
 */
protected void sendNewKeys() throws IOException {
    log.info("Send SSH_MSG_NEWKEYS");
    Buffer buffer = createBuffer(SshConstants.Message.SSH_MSG_NEWKEYS, 0);
    writePacket(buffer);
}

/**
 * Put new keys into use.
 * This method will initialize the ciphers, digests, macs and compression
 * according to the negotiated server and client proposals.
 *
 * @param isServer boolean indicating if this session is on the server or the client side
 * @throws Exception if an error occurs
 */
protected void receiveNewKeys(boolean isServer) throws Exception {
    byte[] IVc2s;
    byte[] IVs2c;
    byte[] Ec2s;
    byte[] Es2c;
    byte[] MACc2s;
    byte[] MACs2c;
    byte[] K = kex.getK();
    byte[] H = kex.getH();
    Digest hash = kex.getHash();
    Cipher s2ccipher;
    Cipher c2scipher;
    Mac s2cmac;
    Mac c2smac;
    Compression s2ccomp;
    Compression c2scomp;

    // The session id is the H from the first key exchange and never changes afterwards
    if (sessionId == null) {
        sessionId = new byte[H.length];
        System.arraycopy(H, 0, sessionId, 0, H.length);
    }
    // Derive the six keys by hashing (K || H || letter || session_id), where the
    // single letter byte starts at 'A' (0x41) and is incremented in place for
    // each subsequent key (IVc2s, IVs2c, Ec2s, Es2c, MACc2s, MACs2c).
    Buffer buffer = new Buffer();
    buffer.putMPInt(K);
    buffer.putRawBytes(H);
    buffer.putByte((byte) 0x41);
    buffer.putRawBytes(sessionId);
    int pos = buffer.available();
    byte[] buf = buffer.array();
    hash.update(buf, 0, pos);
    IVc2s = hash.digest();
    int j = pos - sessionId.length - 1; // index of the letter byte
    buf[j]++;
    hash.update(buf, 0, pos);
    IVs2c = hash.digest();
    buf[j]++;
    hash.update(buf, 0, pos);
    Ec2s = hash.digest();
    buf[j]++;
    hash.update(buf, 0, pos);
    Es2c = hash.digest();
    buf[j]++;
    hash.update(buf, 0, pos);
    MACc2s = hash.digest();
    buf[j]++;
    hash.update(buf, 0, pos);
    MACs2c = hash.digest();
    s2ccipher = NamedFactory.Utils.create(factoryManager.getCipherFactories(), negociated[SshConstants.PROPOSAL_ENC_ALGS_STOC]);
    Es2c = resizeKey(Es2c, s2ccipher.getBlockSize(), hash, K, H);
    s2ccipher.init(isServer ? Cipher.Mode.Encrypt : Cipher.Mode.Decrypt, Es2c, IVs2c);
    s2cmac = NamedFactory.Utils.create(factoryManager.getMacFactories(), negociated[SshConstants.PROPOSAL_MAC_ALGS_STOC]);
    s2cmac.init(MACs2c);
    c2scipher = NamedFactory.Utils.create(factoryManager.getCipherFactories(), negociated[SshConstants.PROPOSAL_ENC_ALGS_CTOS]);
    Ec2s = resizeKey(Ec2s, c2scipher.getBlockSize(), hash, K, H);
    c2scipher.init(isServer ? Cipher.Mode.Decrypt : Cipher.Mode.Encrypt, Ec2s, IVc2s);
    c2smac = NamedFactory.Utils.create(factoryManager.getMacFactories(), negociated[SshConstants.PROPOSAL_MAC_ALGS_CTOS]);
    c2smac.init(MACc2s);
    s2ccomp = NamedFactory.Utils.create(factoryManager.getCompressionFactories(), negociated[SshConstants.PROPOSAL_COMP_ALGS_STOC]);
    c2scomp = NamedFactory.Utils.create(factoryManager.getCompressionFactories(), negociated[SshConstants.PROPOSAL_COMP_ALGS_CTOS]);
    // The server sends s2c and receives c2s; the client is the mirror image
    if (isServer) {
        outCipher = s2ccipher;
        outMac = s2cmac;
        outCompression = s2ccomp;
        inCipher = c2scipher;
        inMac = c2smac;
        inCompression = c2scomp;
    } else {
        outCipher = c2scipher;
        outMac = c2smac;
        outCompression = c2scomp;
        inCipher = s2ccipher;
        inMac = s2cmac;
        inCompression = s2ccomp;
    }
    outCipherSize = outCipher.getIVSize();
    if (outCompression != null) {
        outCompression.init(Compression.Type.Deflater, -1);
    }
    inCipherSize = inCipher.getIVSize();
    inMacResult = new byte[inMac.getBlockSize()];
    if (inCompression != null) {
        inCompression.init(Compression.Type.Inflater, -1);
    }
}

/**
 * Private method used while putting new keys into use that will resize the key used to
 * initialize the cipher to the needed length, by repeatedly appending
 * hash(K || H || key-so-far) until the key is long enough.
 *
 * @param E the key to resize
 * @param blockSize the cipher block size
 * @param hash the hash algorithm
 * @param K the key exchange K parameter
 * @param H the key exchange H parameter
 * @return the resized key
 * @throws Exception if a problem occurs while resizing the key
 */
private byte[] resizeKey(byte[] E, int blockSize, Digest hash, byte[] K, byte[] H) throws Exception {
    while (blockSize > E.length) {
        Buffer buffer = new Buffer();
        buffer.putMPInt(K);
        buffer.putRawBytes(H);
        buffer.putRawBytes(E);
        hash.update(buffer.array(), 0, buffer.available());
        byte[] foo = hash.digest();
        byte[] bar = new byte[E.length + foo.length];
        System.arraycopy(E, 0, bar, 0, E.length);
        System.arraycopy(foo, 0, bar, E.length, foo.length);
        E = bar;
    }
    return E;
}

/**
 * Send a disconnect packet with the given reason and message.
 * Once the packet has been sent, the session will be closed
 * asynchronously.
 *
 * @param reason the reason code for this disconnect
 * @param msg the text message
 * @throws IOException if an error occurred sending the packet
 */
public void disconnect(int reason, String msg) throws IOException {
    Buffer buffer = createBuffer(SshConstants.Message.SSH_MSG_DISCONNECT, 0);
    buffer.putInt(reason);
    buffer.putString(msg);
    buffer.putString(""); // language tag, left empty
    WriteFuture f = writePacket(buffer);
    // Close the session only after the disconnect packet has been written out
    f.addListener(new IoFutureListener() {
        public void operationComplete(IoFuture future) {
            close(false);
        }
    });
}

/**
 * Send an unimplemented packet. This packet should contain the
 * sequence id of the unsupported packet: this number is assumed to
 * be the last packet received.
 *
 * @throws IOException if an error occurred sending the packet
 */
protected void notImplemented() throws IOException {
    Buffer buffer = createBuffer(SshConstants.Message.SSH_MSG_UNIMPLEMENTED, 0);
    buffer.putInt(seqi - 1);
    writePacket(buffer);
}

/**
 * Compute the negotiated proposals by merging the client and
 * server proposal. The negotiated proposal will be stored in
 * the {@link #negociated} property.
 * For each proposal slot the first client-listed algorithm also present in the
 * server list wins; only the two language slots may remain unresolved.
 */
protected void negociate() {
    String[] guess = new String[SshConstants.PROPOSAL_MAX];
    for (int i = 0; i < SshConstants.PROPOSAL_MAX; i++) {
        String[] c = clientProposal[i].split(",");
        String[] s = serverProposal[i].split(",");
        for (String ci : c) {
            for (String si : s) {
                if (ci.equals(si)) {
                    guess[i] = ci;
                    break;
                }
            }
            if (guess[i] != null) {
                break;
            }
        }
        if (guess[i] == null && i != SshConstants.PROPOSAL_LANG_CTOS && i != SshConstants.PROPOSAL_LANG_STOC) {
            throw new IllegalStateException("Unable to negociate key exchange for item " + i);
        }
    }
    negociated = guess;
}

// Allocate the next channel identifier; synchronized on the channels map.
protected int getNextChannelId() {
    synchronized (channels) {
        return nextChannelId++;
    }
}

/**
 * Register a channel with this session, assigning it a fresh identifier.
 *
 * @param channel the channel to register
 * @return the identifier assigned to the channel
 * @throws Exception if channel initialization fails
 */
public int registerChannel(Channel channel) throws Exception {
    int channelId = getNextChannelId();
    channel.init(this, channelId);
    channels.put(channelId, channel);
    return channelId;
}

// Handle SSH_MSG_CHANNEL_OPEN_CONFIRMATION: forward the remote id, window size
// and max packet size to the target channel.
protected void channelOpenConfirmation(Buffer buffer) throws IOException {
    Channel channel = getChannel(buffer);
    LogUtils.info(log, "Received SSH_MSG_CHANNEL_OPEN_CONFIRMATION on channel {0}", channel.getId());
    int recipient = buffer.getInt();
    int rwsize = buffer.getInt();
    int rmpsize = buffer.getInt();
    channel.handleOpenSuccess(recipient, rwsize, rmpsize, buffer);
}

// Handle SSH_MSG_CHANNEL_OPEN_FAILURE: unregister the channel and let it
// process the failure details.
protected void channelOpenFailure(Buffer buffer) throws IOException {
    AbstractClientChannel channel = (AbstractClientChannel) getChannel(buffer);
    LogUtils.info(log, "Received SSH_MSG_CHANNEL_OPEN_FAILURE on channel {0}", channel.getId());
    channels.remove(channel.getId());
    channel.handleOpenFailure(buffer);
}

/**
 * Process incoming data on a channel
 *
 * @param buffer the buffer containing the data
 * @throws Exception if an error occurs
 */
protected void channelData(Buffer buffer) throws Exception {
    Channel channel = getChannel(buffer);
    channel.handleData(buffer);
}

/**
 * Process incoming extended data on a channel
 *
 * @param buffer the buffer containing the data
 * @throws Exception if an error occurs
 */
protected void channelExtendedData(Buffer buffer) throws Exception {
    Channel channel = getChannel(buffer);
    channel.handleExtendedData(buffer);
}

/**
 * Process a window adjust packet on a channel.
 * An SshException here (e.g. an unknown channel) is only logged, not propagated.
 *
 * @param buffer the buffer containing the window adjustment parameters
 * @throws Exception if an error occurs
 */
protected void channelWindowAdjust(Buffer buffer) throws Exception {
    try {
        Channel channel = getChannel(buffer);
        channel.handleWindowAdjust(buffer);
    } catch (SshException e) {
        log.info(e.getMessage());
    }
}

/**
 * Process end of file on a channel
 *
 * @param buffer the buffer containing the packet
 * @throws Exception if an error occurs
 */
protected void channelEof(Buffer buffer) throws Exception {
    Channel channel = getChannel(buffer);
    channel.handleEof();
}

/**
 * Close a channel due to a close packet received
 *
 * @param buffer the buffer containing the packet
 * @throws Exception if an error occurs
 */
protected void channelClose(Buffer buffer) throws Exception {
    Channel channel = getChannel(buffer);
    channel.handleClose();
    unregisterChannel(channel);
}

/**
 * Remove this channel from the list of managed channels
 *
 * @param channel the channel
 */
public void unregisterChannel(Channel channel) {
    channels.remove(channel.getId());
}

/**
 * Service a request on a channel
 *
 * @param buffer the buffer containing the request
 * @throws IOException if an error occurs
 */
protected void channelRequest(Buffer buffer) throws IOException {
    Channel channel = getChannel(buffer);
    channel.handleRequest(buffer);
}

/**
 * Process a failure on a channel
 *
 * @param buffer the buffer containing the packet
 * @throws Exception if an error occurs
 */
protected void channelFailure(Buffer buffer) throws Exception {
    Channel channel = getChannel(buffer);
    channel.handleFailure();
}

/**
 * Retrieve the channel designated by the given packet
 *
 * @param buffer the incoming packet
 * @return the target channel
 * @throws IOException if the channel does not exist
 */
protected Channel getChannel(Buffer buffer) throws IOException {
    int recipient = buffer.getInt();
    Channel channel = channels.get(recipient);
    if (channel == null) {
        // Rewind past the recipient id (4 bytes) and the command byte to report it
        buffer.rpos(buffer.rpos() - 5);
        SshConstants.Message cmd = buffer.getCommand();
        throw new SshException("Received " + cmd + " on unknown channel " + recipient);
    }
    return channel;
}

/**
 * Retrieve a configuration property as an integer
 *
 * @param name the name of the property
 * @param defaultValue the default value
 * @return the value of the configuration property or the default value if not found
 */
public int getIntProperty(String name, int defaultValue) {
    try {
        String v = factoryManager.getProperties().get(name);
        if (v != null) {
            return Integer.parseInt(v);
        }
    } catch (Exception e) {
        // Ignore: missing or unparsable values fall back to the default
    }
    return defaultValue;
}

/**
 * Returns the value of the user-defined attribute of this session.
 *
 * @param key the key of the attribute; must not be null.
 * @return <tt>null</tt> if there is no attribute with the specified key
 */
@SuppressWarnings("unchecked")
public <T> T getAttribute(AttributeKey<T> key) {
    return (T)attributes.get(key);
}

/**
 * Sets a user-defined attribute.
 *
 * @param key the key of the attribute; must not be null.
 * @param value the value of the attribute; must not be null.
 * @return The old value of the attribute. <tt>null</tt> if it is new.
 */
@SuppressWarnings("unchecked")
public <T, E extends T> T setAttribute(AttributeKey<T> key, E value) {
    return (T)attributes.put(key, value);
}
}
/* * #%L * ===================================================== * _____ _ ____ _ _ _ _ * |_ _|_ __ _ _ ___| |_ / __ \| | | | ___ | | | | * | | | '__| | | / __| __|/ / _` | |_| |/ __|| |_| | * | | | | | |_| \__ \ |_| | (_| | _ |\__ \| _ | * |_| |_| \__,_|___/\__|\ \__,_|_| |_||___/|_| |_| * \____/ * * ===================================================== * * Hochschule Hannover * (University of Applied Sciences and Arts, Hannover) * Faculty IV, Dept. of Computer Science * Ricklinger Stadtweg 118, 30459 Hannover, Germany * * Email: trust@f4-i.fh-hannover.de * Website: http://trust.f4.hs-hannover.de/ * * This file is part of ifmapj, version 2.3.2, implemented by the Trust@HsH * research group at the Hochschule Hannover. * %% * Copyright (C) 2010 - 2016 Trust@HsH * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L%
 */
package de.hshannover.f4.trust.ifmapj.channel;

import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;

import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLSocketFactory;

import de.hshannover.f4.trust.ifmapj.exception.CommunicationException;
import de.hshannover.f4.trust.ifmapj.exception.InitializationException;
import de.hshannover.f4.trust.ifmapj.log.IfmapJLog;

/**
 * Attempt to create a {@link CommunicationHandler} implementation to be
 * independent of any extra library... Meaning making usage only of standard
 * JAVA classes.
 *
 * @author aw
 */
class JavaCommunicationHandler extends AbstractCommunicationHandler {

	private OutputStream mOutputStream;
	private InputStream mInputStream;
	private DataOutput mDataOutputStream;
	private DataInput mDataInputStream;

	/** Headers of the last response, keyed by the header name as received. */
	private Map<String, String> mReceivedHeaders;

	/** Shared send/receive buffer, grown on demand. */
	private byte[] mBuffer;

	/** Simple holder for the parsed HTTP status line. */
	private class StatusLine {
		public int mStatusCode;
		public String mReason;
	}

	JavaCommunicationHandler(String url, String user, String pass,
			SSLSocketFactory sslSocketFactory, HostnameVerifier verifier,
			int initialConnectionTimeout) throws InitializationException {
		super(url, user, pass, sslSocketFactory, verifier, initialConnectionTimeout);
		// some random initial buffer
		mBuffer = new byte[1024];
		mReceivedHeaders = new HashMap<String, String>(10);
	}

	/**
	 * Send the request body, parse the status line and headers of the reply,
	 * and read the reply body (chunked or Content-Length framed).
	 *
	 * @param is stream containing the request body to send
	 * @return stream over the received response body
	 * @throws IOException on transport errors
	 * @throws CommunicationException on HTTP-level errors
	 */
	@Override
	public InputStream doActualRequest(InputStream is) throws IOException,
			CommunicationException {
		StatusLine statusLine = null;
		int retLength = -1;

		sendHttpBody(is);
		statusLine = readStatusLine();
		checkStatusLine(statusLine);
		receiveHeaders();

		if (responseIsChunked()) {
			retLength = readChunkedBody();
		} else if (responseContainsContentLength()) {
			retLength = readContinuousBody();
		} else {
			throw new CommunicationException("Could not determine length of body");
		}
		return new ByteArrayInputStream(mBuffer, 0, retLength);
	}

	/**
	 * Read a chunked transfer-encoded body into {@link #mBuffer}.
	 *
	 * @return the total number of body bytes read
	 */
	private int readChunkedBody() throws CommunicationException, IOException {
		String chunkLengthLine = null;
		String tmpLine = null;
		int curOffset = 0;
		int curChunkLength = -1;

		while ((chunkLengthLine = mDataInputStream.readLine()) != null) {
			if (chunkLengthLine.length() == 0) {
				throw new CommunicationException("Unexpected empty chunk length");
			}

			curChunkLength = parseChunkLength(chunkLengthLine);

			// Was the last chunk reached?
			if (curChunkLength == 0) {
				break;
			}

			reallocateBuffer(mBuffer.length + 2 * curChunkLength);
			readStreamIntoBuffer(mInputStream, curOffset, curChunkLength);
			curOffset += curChunkLength;

			// Read the CR LF sequence at the end of the chunk body
			tmpLine = mDataInputStream.readLine();
			if (tmpLine == null || tmpLine.length() > 0) {
				throw new CommunicationException("Unexpected chunk ending: " + tmpLine);
			}
		}

		// After the last chunk, there might be some trailers, we ignore them
		// for now, but we need to read them anyway.
		while ((tmpLine = mDataInputStream.readLine()) != null) {
			if (tmpLine.length() == 0) {
				break;
			}
		}
		return curOffset;
	}

	/**
	 * Parse the hexadecimal chunk size from a chunk-size line; chunk
	 * extensions after the first token are ignored.
	 */
	private int parseChunkLength(String chunkLengthLine) throws CommunicationException {
		String[] lengthStrElements = chunkLengthLine.split(" ");
		if (lengthStrElements.length < 1) {
			throw new CommunicationException("No chunk length included: " + chunkLengthLine);
		}
		try {
			return Integer.parseInt(lengthStrElements[0], 16);
		} catch (NumberFormatException e) {
			throw new CommunicationException("Could not parse chunk length");
		}
	}

	private boolean responseContainsContentLength() {
		return findHeaderValue("Content-Length") != null;
	}

	private boolean responseIsChunked() {
		String teHeader = findHeaderValue("Transfer-Encoding");
		// FIXME: We only look out for chunked, but
		// there could be something else as well
		return teHeader != null && teHeader.contains("chunked");
	}

	/** Lazily attach the data streams to the socket. */
	@Override
	protected void prepareCommunication() throws IOException {
		if (mOutputStream != null && mInputStream != null) {
			return;
		}
		mOutputStream = getSocket().getOutputStream();
		mInputStream = getSocket().getInputStream();
		mDataOutputStream = new DataOutputStream(mOutputStream);
		mDataInputStream = new DataInputStream(mInputStream);
	}

	@Override
	protected void createPostRequest(String path) throws IOException {
		mReceivedHeaders.clear();
		writeLine("POST " + path + " HTTP/1.1");
	}

	@Override
	protected void addHeader(String key, String value) throws IOException {
		writeHeaderLine(key, value);
	}

	@Override
	protected void finishHeaders() throws IOException {
		writeHeaderEnding();
	}

	@Override
	protected boolean replyIsGzipped() throws IOException {
		String encodingHdr = findHeaderValue("Content-Encoding");
		return encodingHdr != null && encodingHdr.contains("gzip");
	}

	// NOTE(review): relies on is.available() returning the full body size,
	// which holds for ByteArrayInputStream but not for arbitrary streams.
	private void sendHttpBody(InputStream is) throws IOException {
		int length = is.available();
		allocateBuffer(length);
		readStreamIntoBuffer(is, length);
		sendBufferContents(length);
	}

	private void writeHeaderLine(String key, String value) throws IOException {
		writeLine(key + ": " + value);
	}

	private void writeHeaderEnding() throws IOException {
		writeLine("");
		mOutputStream.flush();
	}

	private void writeLine(String line) throws IOException {
		mDataOutputStream.write((line + "\r\n").getBytes());
	}

	/**
	 * Close both streams, always nulling them out, and rethrow the first
	 * exception encountered (if any).
	 */
	@Override
	protected void closeTcpConnectionImpl() throws IOException {
		IOException tmp = null;
		try {
			if (mInputStream != null) {
				mInputStream.close();
			}
		} catch (IOException e) {
			if (tmp == null) {
				tmp = e;
			}
		} finally {
			mInputStream = null;
		}
		try {
			if (mOutputStream != null) {
				mOutputStream.close();
			}
		} catch (IOException e) {
			// FIX: was "tmp != null", which discarded this exception unless one
			// had already been recorded. Keep the first exception raised.
			if (tmp == null) {
				tmp = e;
			}
		} finally {
			mOutputStream = null;
		}
		mDataOutputStream = null;
		mDataInputStream = null;
		if (tmp != null) {
			throw tmp;
		}
	}

	private int getContentLength() throws CommunicationException {
		String lengthHdr = findHeaderValue("Content-Length");
		if (lengthHdr == null) {
			throw new CommunicationException("No Content-Length header found");
		}
		return parseContentLengthHeader(lengthHdr);
	}

	private int parseContentLengthHeader(String lengthHdr) throws CommunicationException {
		try {
			return Integer.parseInt(lengthHdr);
		} catch (NumberFormatException e) {
			throw new CommunicationException("Content-Length invalid: "
					+ "\"" + lengthHdr + "\"");
		}
	}

	/** Case-insensitive header lookup, or null if the header is absent. */
	private String findHeaderValue(String hdrField) {
		for (String hdr : mReceivedHeaders.keySet()) {
			if (hdr.equalsIgnoreCase(hdrField)) {
				return mReceivedHeaders.get(hdr);
			}
		}
		return null;
	}

	/**
	 * Read all response headers into {@link #mReceivedHeaders} until the
	 * empty line terminating the header section.
	 *
	 * @throws CommunicationException on malformed headers or premature EOF
	 */
	private void receiveHeaders() throws CommunicationException, IOException {
		String line = null;
		mReceivedHeaders.clear();
		// FIX: the previous loop read a line without assigning it, then
		// dereferenced the still-null 'line' (NPE) and read a second line per
		// iteration, silently dropping every other header.
		while ((line = mDataInputStream.readLine()) != null && line.length() > 0) {
			String[] fields = line.split(":", 2);
			if (fields.length < 2) {
				throw new CommunicationException("Invalid Header Received: " + line);
			}
			// trim a bit
			fields[0] = fields[0].replaceAll("^\\s+", "");
			fields[0] = fields[0].replaceAll("\\s+$", "");
			fields[1] = fields[1].replaceAll("^\\s+", "");
			fields[1] = fields[1].replaceAll("\\s+$", "");
			mReceivedHeaders.put(fields[0], fields[1]);
		}
		if (line == null) {
			throw new CommunicationException("Unexpected EOF reached");
		}
	}

	/**
	 * Read and parse the HTTP/1.1 status line of the response.
	 */
	private StatusLine readStatusLine() throws CommunicationException, IOException {
		StatusLine ret = new StatusLine();
		String line = mDataInputStream.readLine();
		if (line == null) {
			throw new CommunicationException("No status line received");
		}
		String[] fields = line.split(" ", 3);
		if (fields.length < 2) {
			throw new CommunicationException("Bad status line received");
		}
		String proto = fields[0];
		if (!proto.equals("HTTP/1.1")) {
			throw new CommunicationException("Communication not HTTP/1.1");
		}
		try {
			ret.mStatusCode = Integer.parseInt(fields[1]);
		} catch (NumberFormatException e) {
			throw new CommunicationException("Bad status code received");
		}
		if (fields.length == 3) {
			ret.mReason = fields[2];
		}
		return ret;
	}

	/**
	 * Read a Content-Length framed body into {@link #mBuffer}.
	 *
	 * @return the number of body bytes read
	 */
	private int readContinuousBody() throws IOException, CommunicationException {
		int length = getContentLength();
		allocateBuffer(length);
		readStreamIntoBuffer(mInputStream, length);
		return length;
	}

	private void checkStatusLine(StatusLine status) throws CommunicationException {
		if (status.mStatusCode != 200) {
			IfmapJLog.warn("HTTP Status Code: " + status.mStatusCode + " " + status.mReason);
			throw new CommunicationException("HTTP Status Code: "
					+ status.mStatusCode + " " + status.mReason);
		}
	}

	private void sendBufferContents(int length) throws IOException {
		mOutputStream.write(mBuffer, 0, length);
		mOutputStream.flush();
	}

	/**
	 * Helper to read length bytes into {@link JavaCommunicationHandler#mBuffer}.
	 *
	 * @param is
	 * @param length
	 * @throws IOException
	 */
	private void readStreamIntoBuffer(InputStream is, int length) throws IOException {
		readStreamIntoBuffer(is, 0, length);
	}

	private void readStreamIntoBuffer(InputStream is, int off, int length) throws IOException {
		int read = 0;
		int ret;
		while (read < length) {
			ret = is.read(mBuffer, read + off, length - read);
			if (ret == -1) {
				throw new IOException("Stream exception");
			}
			read += ret;
		}
	}

	/**
	 * Check if our current buffer is too small, and if yes, allocate some
	 * more. Existing content is discarded.
	 *
	 * @param newLength the required minimum buffer size
	 */
	private void allocateBuffer(int newLength) {
		allocateBuffer(newLength, false);
	}

	/** Grow the buffer, preserving its current contents. */
	private void reallocateBuffer(int newLength) {
		allocateBuffer(newLength, true);
	}

	private void allocateBuffer(int newLength, boolean copy) {
		byte[] tmp;
		if (newLength > mBuffer.length) {
			tmp = mBuffer;
			mBuffer = new byte[newLength];
			if (copy) {
				System.arraycopy(tmp, 0, mBuffer, 0, tmp.length);
			}
		}
	}
}
package org.jutility.io.database;

/*
 * #%L
 * jutility-incubation
 * %%
 * Copyright (C) 2013 - 2014 jutility.org
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

import org.jutility.common.reflection.ReflectionUtils;

/**
 * Property metadata for a list-valued property that is persisted in its own
 * join table: the table name, the key-to-alias maps for the container class
 * and the list element type, and the reflective accessors.
 *
 * @author Peter J. Radics
 * @version 0.1
 *
 */
public class ListPropertyInfo extends BaseProperty {

    private final String tableName;
    private final Map<String, String> containerClassAliasMap;
    private final Map<String, String> listTypeAliasMap;
    private final Class<?> collectionType;

    /**
     * Returns the table name.
     *
     * @return the table name.
     */
    public String getTableName() {
        return tableName;
    }

    /**
     * Returns the collection type.
     *
     * @return the collection type.
     */
    public Class<?> getCollectionType() {
        return this.collectionType;
    }

    /**
     * Returns the container-class alias map.
     *
     * @return the container-class alias map.
     */
    public Map<String, String> getContainerClassAliasMap() {
        return this.containerClassAliasMap;
    }

    /**
     * Returns the list-type alias map.
     *
     * @return the list-type alias map.
     */
    public Map<String, String> getListTypeAliasMap() {
        return this.listTypeAliasMap;
    }

    /**
     * Returns the alias for a container-class key.
     *
     * @param key
     *            the key.
     * @return the alias.
     */
    public String getAliasForContainerClassKey(String key) {
        return this.containerClassAliasMap.get(key);
    }

    /**
     * Returns the container-class key for an alias.
     *
     * @param alias
     *            the alias.
     * @return the key, or {@code null} if no key maps to the alias.
     */
    public String getContainerClassKeyForAlias(String alias) {
        for (Map.Entry<String, String> entry : this.containerClassAliasMap.entrySet()) {
            if (entry.getValue().equals(alias)) {
                return entry.getKey();
            }
        }
        return null;
    }

    /**
     * Returns the alias for a list-type key.
     *
     * @param key
     *            the key.
     * @return the alias.
     */
    public String getAliasForListTypeKey(String key) {
        return this.listTypeAliasMap.get(key);
    }

    /**
     * Returns the list-type key for an alias.
     *
     * @param foreignKeyAlias
     *            the alias.
     * @return the key, or {@code null} if no key maps to the alias.
     */
    public String getListTypeKeyForAlias(String foreignKeyAlias) {
        // FIX: previously iterated over containerClassAliasMap's key set while
        // looking values up in listTypeAliasMap, which missed (or NPE'd on)
        // keys that exist only in the list-type map.
        for (Map.Entry<String, String> entry : this.listTypeAliasMap.entrySet()) {
            if (entry.getValue().equals(foreignKeyAlias)) {
                return entry.getKey();
            }
        }
        return null;
    }

    /**
     * Returns a list of all aliased keys.
     *
     * @return a list of all aliased keys.
     */
    public Set<String> getAliasedKeySet() {
        Set<String> aliasedKeyList = new LinkedHashSet<String>();
        for (String key : this.getContainerClassAliasMap().keySet()) {
            aliasedKeyList.add(this.getAliasForContainerClassKey(key));
        }
        for (String key : this.getListTypeAliasMap().keySet()) {
            aliasedKeyList.add(this.getAliasForListTypeKey(key));
        }
        return aliasedKeyList;
    }

    /**
     * Returns a list of all aliased keys of the container class.
     *
     * @return a list of all aliased keys of the container class.
     */
    public Set<String> getAliasedContainerClassKeySet() {
        Set<String> aliasedKeyList = new LinkedHashSet<String>();
        for (String key : this.getContainerClassAliasMap().keySet()) {
            aliasedKeyList.add(this.getAliasForContainerClassKey(key));
        }
        return aliasedKeyList;
    }

    /**
     * Returns a list of all aliased keys of the list type.
     *
     * @return a list of all aliased keys of the list type.
     */
    public Set<String> getAliasedListTypeKeySet() {
        Set<String> aliasedKeyList = new LinkedHashSet<String>();
        for (String key : this.getListTypeAliasMap().keySet()) {
            aliasedKeyList.add(this.getAliasForListTypeKey(key));
        }
        return aliasedKeyList;
    }

    /**
     * Creates a new instance of the {@link ListPropertyInfo} class.
     *
     * @param propertyName
     * @param listType
     * @param collectionType
     * @param tableName
     * @param containerClassAliasMap
     * @param listTypeAliasMap
     */
    public ListPropertyInfo(String propertyName, Class<?> listType,
            Class<?> collectionType, String tableName,
            Map<String, String> containerClassAliasMap,
            Map<String, String> listTypeAliasMap) {
        this(propertyName, listType, collectionType, tableName,
                containerClassAliasMap, listTypeAliasMap, null, null, null);
    }

    /**
     * Creates a new instance of the {@link ListPropertyInfo} class.
     *
     * @param propertyName
     * @param listType
     * @param collectionType
     * @param tableName
     * @param containerClassAliasMap
     * @param listTypeAliasMap
     * @param field
     */
    public ListPropertyInfo(String propertyName, Class<?> listType,
            Class<?> collectionType, String tableName,
            Map<String, String> containerClassAliasMap,
            Map<String, String> listTypeAliasMap, Field field) {
        this(propertyName, listType, collectionType, tableName,
                containerClassAliasMap, listTypeAliasMap, field, null, null);
    }

    /**
     * Creates a new instance of the {@link ListPropertyInfo} class.
     *
     * @param propertyName
     * @param listType
     * @param collectionType
     * @param tableName
     * @param containerClassAliasMap
     * @param listTypeAliasMap
     * @param getter
     * @param setter
     */
    public ListPropertyInfo(String propertyName, Class<?> listType,
            Class<?> collectionType, String tableName,
            Map<String, String> containerClassAliasMap,
            Map<String, String> listTypeAliasMap, Method getter, Method setter) {
        this(propertyName, listType, collectionType, tableName,
                containerClassAliasMap, listTypeAliasMap, null, getter, setter);
    }

    /**
     * Creates a new instance of the {@link ListPropertyInfo} class.
     *
     * @param propertyName
     * @param propertyType
     * @param collectionType
     * @param tableName
     * @param containerClassAliasMap
     * @param listTypeAliasMap
     * @param field
     * @param getter
     * @param setter
     */
    public ListPropertyInfo(String propertyName, Class<?> propertyType,
            Class<?> collectionType, String tableName,
            Map<String, String> containerClassAliasMap,
            Map<String, String> listTypeAliasMap, Field field, Method getter,
            Method setter) {
        super(propertyName, propertyType);
        this.tableName = tableName;
        this.containerClassAliasMap = containerClassAliasMap;
        this.listTypeAliasMap = listTypeAliasMap;
        this.collectionType = collectionType;
        this.setField(field);
        this.setGetter(getter);
        this.setSetter(setter);
    }

    /**
     * Aliases a container-class key-value map.
     *
     * @param keyValueMap
     *            the key-value map.
     * @return an aliased key-value map.
     */
    public Map<String, ?> aliasContainerClassKeyValueMap(
            Map<String, ?> keyValueMap) {
        LinkedHashMap<String, Object> aliasedKeyValueMap = new LinkedHashMap<String, Object>(
                keyValueMap.size());
        for (String key : keyValueMap.keySet()) {
            Object value = keyValueMap.get(key);
            String alias = this.getAliasForContainerClassKey(key);
            aliasedKeyValueMap.put(alias, value);
        }
        return aliasedKeyValueMap;
    }

    /**
     * De-Aliases a container-class key-value map.
     *
     * @param aliasedKeyValueMap
     *            the aliased key-value map.
     * @return a de-aliased key-value map.
     */
    public Map<String, ?> dealiasContainerClassKeyValueMap(
            Map<String, ?> aliasedKeyValueMap) {
        LinkedHashMap<String, Object> keyValueMap = new LinkedHashMap<String, Object>(
                aliasedKeyValueMap.size());
        for (String alias : aliasedKeyValueMap.keySet()) {
            Object value = aliasedKeyValueMap.get(alias);
            String key = this.getContainerClassKeyForAlias(alias);
            keyValueMap.put(key, value);
        }
        return keyValueMap;
    }

    /**
     * Aliases a list-type key-value map.
     *
     * @param keyValueMap
     *            the key-value map.
     * @return an aliased key-value map.
     */
    public Map<String, ?> aliasListTypeKeyValueMap(Map<String, ?> keyValueMap) {
        LinkedHashMap<String, Object> aliasedKeyValueMap = new LinkedHashMap<String, Object>(
                keyValueMap.size());
        for (String key : keyValueMap.keySet()) {
            Object value = keyValueMap.get(key);
            String alias = this.getAliasForListTypeKey(key);
            aliasedKeyValueMap.put(alias, value);
        }
        return aliasedKeyValueMap;
    }

    /**
     * De-Aliases a list-type key-value map.
     *
     * @param aliasedKeyValueMap
     *            the aliased key-value map.
     * @return a de-aliased key-value map.
     */
    public Map<String, ?> dealiasListTypeKeyValueMap(
            Map<String, ?> aliasedKeyValueMap) {
        LinkedHashMap<String, Object> keyValueMap = new LinkedHashMap<String, Object>(
                aliasedKeyValueMap.size());
        for (String alias : aliasedKeyValueMap.keySet()) {
            Object value = aliasedKeyValueMap.get(alias);
            String key = this.getListTypeKeyForAlias(alias);
            keyValueMap.put(key, value);
        }
        return keyValueMap;
    }

    /**
     * Returns the mutation signature.
     *
     * @return the mutation signature.
     */
    public String mutationSignature() {
        String signature = "a field";
        if (this.getSetter() != null) {
            signature = ReflectionUtils.getSignature(this.getSetter());
        }
        else if (this.getField() != null) {
            signature = ReflectionUtils.getSignature(this.getField());
        }
        return signature;
    }

    /**
     * Returns the access signature.
     * <p>
     * NOTE: the method name contains a typo ("accesss") but is kept for
     * backward compatibility with existing callers.
     *
     * @return the access signature.
     */
    public String accesssSignature() {
        String signature = "a field";
        if (this.getGetter() != null) {
            signature = ReflectionUtils.getSignature(this.getGetter());
        }
        else if (this.getField() != null) {
            signature = ReflectionUtils.getSignature(this.getField());
        }
        return signature;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj != null && obj instanceof ListPropertyInfo) {
            ListPropertyInfo otherEntry = (ListPropertyInfo) obj;
            return this.getPropertyName().equals(otherEntry.getPropertyName());
        }
        return false;
    }

    @Override
    public int hashCode() {
        return this.getPropertyName().hashCode();
    }

    @Override
    public String toString() {
        String returnValue = this.getPropertyName() + "->`" + tableName + "` "
                + containerClassAliasMap + "-" + listTypeAliasMap;
        returnValue += ": Field: " + (this.getField() != null);
        returnValue += ", Getter: " + (this.getGetter() != null);
        returnValue += ", Setter: " + (this.getSetter() != null);
        return returnValue;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.test.framework;

import com.google.common.base.Stopwatch;
import com.google.common.collect.Lists;
import org.apache.drill.test.framework.TestCaseModeler.TestMatrix;
import org.apache.drill.test.framework.TestVerifier.TestStatus;
import org.apache.drill.test.framework.TestVerifier.VerificationException;
import org.apache.drill.test.framework.TestVerifier.PlanVerificationException;
import org.apache.log4j.Logger;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Executes a single ODBC-based Drill test case by invoking an external script,
 * then verifies the produced output against the expected results.
 */
public class DrillTestOdbc implements DrillTest {
  private static final Logger LOG = Logger.getLogger("DrillTestLogger");
  private String query = null;
  private String outputFilename;
  private volatile TestStatus testStatus = TestStatus.PENDING;
  private Exception exception;
  private DrillTestCase modeler;
  private Stopwatch duration;
  private TestMatrix matrix;
  private List<Integer> columnTypes;
  // FIX: was a raw List; the elements are always column-name Strings.
  private List<String> columnLabels = new ArrayList<String>();
  private Thread thread;
  private int id;
  private int totalCases;
  // FIX: was "static volatile int" incremented with ++, which is not atomic
  // when test clones run on multiple threads.
  private static final AtomicInteger noOfCasesCompleted = new AtomicInteger();

  public DrillTestOdbc(DrillTestCase modeler, int id, int totalCases) {
    this.id = id;
    this.modeler = modeler;
    this.matrix = modeler.matrices.get(0);
    this.totalCases = totalCases;
  }

  /**
   * Runs the external ODBC test script, locates the main query, and maps the
   * script's exit code / verification outcome onto a {@link TestStatus}.
   */
  public void run() {
    final Stopwatch stopwatch = Stopwatch.createStarted();
    this.thread = Thread.currentThread();
    int mainQueryIndex = -1;
    String[] queries = null;
    setTestStatus(TestStatus.RUNNING);
    try {
      outputFilename = Utils.generateOutputFileName(modeler.queryFilename,
          modeler.testId, false) + "_" + id;
    } catch (IOException e) {
      LOG.error(e.getMessage());
      throw new RuntimeException(e);
    }
    CmdConsOut cmdConsOut = null;
    String command = System.getProperty("user.dir") + "/"
        + DrillTestDefaults.DRILL_TESTDATA_DIR + "/" + modeler.script + " "
        + modeler.queryFilename + " " + outputFilename;
    LOG.info("Running test " + command);
    try {
      cmdConsOut = Utils.execCmd(command);
      if (cmdConsOut.exitCode > 0) {
        throw new RuntimeException("ERROR: " + cmdConsOut.consoleErr);
      }
      queries = Utils.getSqlStatements(modeler.queryFilename);
      for (int i = 0; i < queries.length && mainQueryIndex == -1; i++) {
        if (queries[i].startsWith("--@test")) {
          mainQueryIndex = i;
        }
      }
      if (mainQueryIndex == -1) {
        mainQueryIndex = queries.length / 2; // Currently, the main query must be in the middle of the list of queries
      }
      query = queries[mainQueryIndex];
      // extract column types into columnTypes and column names into columnLabels
      getColumnNamesAndTypes(modeler.queryFilename);
      switch (cmdConsOut.exitCode) {
      case 0:
        TestVerifier testVerifier = new TestVerifier(columnTypes, query,
            columnLabels, matrix);
        try {
          if (query.startsWith("explain")
              || matrix.verificationTypes.get(0).equalsIgnoreCase("regex")
              || matrix.verificationTypes.get(0).equalsIgnoreCase("regex-no-order")
              || matrix.verificationTypes.get(0).equalsIgnoreCase("filter-ratio")) {
            setTestStatus(testVerifier.verifyTextPlan(modeler.expectedFilename,
                outputFilename));
          } else { // "in-memory"
            setTestStatus(testVerifier.verifyResultSet(modeler.expectedFilename,
                outputFilename));
          }
        } catch (VerificationException e) {
          fail(TestStatus.DATA_VERIFICATION_FAILURE, e);
        } catch (PlanVerificationException e) {
          fail(TestStatus.PLAN_VERIFICATION_FAILURE, e);
        }
        break;
      case 1:
        setTestStatus(TestStatus.EXECUTION_FAILURE);
        break;
      case 2:
        setTestStatus(TestStatus.DATA_VERIFICATION_FAILURE);
        break;
      case 3:
        setTestStatus(TestStatus.ORDER_MISMATCH);
        break;
      case 4:
        setTestStatus(TestStatus.TIMEOUT);
        break;
      case 5:
        setTestStatus(TestStatus.CANCELED);
        // FIX: missing break let CANCELED fall through to the default case,
        // which overwrote the status with EXECUTION_FAILURE.
        break;
      default:
        setTestStatus(TestStatus.EXECUTION_FAILURE);
      }
    } catch (Exception e) {
      LOG.info("execution exception " + e.getMessage());
      fail(TestStatus.EXECUTION_FAILURE, e);
    } finally {
      if (testStatus == TestStatus.PASS && !TestDriver.cmdParam.outputQueryResult) {
        // Utils.deleteFile(outputFilename);
      }
      duration = stopwatch;
      LOG.info(testStatus + " (" + stopwatch + ") " + modeler.script + " "
          + modeler.queryFilename);
      int completed = noOfCasesCompleted.incrementAndGet();
      int totalPlanned = totalCases * TestDriver.cmdParam.iterations * TestDriver.cmdParam.clones;
      if ((completed % 100 == 0 && completed <= totalPlanned)
          || (completed >= totalCases && completed % totalCases == 0)) {
        LOG.info("----------------------------------------------------------------------------------------------------------------\nExecution completed for "
            + completed + " out of " + totalPlanned
            + " tests\n----------------------------------------------------------------------------------------------------------------");
      }
    }
  }

  /**
   * Record a failure status and its cause, unless the test already timed out
   * (a timeout must not be overwritten by the subsequent interrupt fallout).
   */
  protected void fail(TestStatus status, Exception e) {
    if (testStatus == TestStatus.TIMEOUT) {
      return;
    }
    setTestStatus(status);
    exception = e;
  }

  @Override
  public void cancel() {
    setTestStatus(TestStatus.TIMEOUT);
    thread.interrupt();
  }

  public synchronized void setTestStatus(TestStatus status) {
    testStatus = status;
  }

  @Override
  public TestStatus getTestStatus() {
    return testStatus;
  }

  @Override
  public Exception getException() {
    return exception;
  }

  @Override
  public String getInputFile() {
    return modeler.queryFilename;
  }

  @Override
  public String getExpectedFile() {
    return modeler.expectedFilename;
  }

  @Override
  public String getQuery() {
    if (query == null) {
      String[] queries = null;
      try {
        queries = Utils.getSqlStatements(modeler.queryFilename);
      } catch (IOException e) {
        e.printStackTrace();
      }
      int mainQueryIndex = queries.length / 2; // Currently, the main query must be in the middle of the list of queries
      query = queries[mainQueryIndex];
    }
    return query;
  }

  @Override
  public String getTestId() {
    return modeler.testId;
  }

  @Override
  public int getCloneId() {
    return id;
  }

  @Override
  public Stopwatch getDuration() {
    return duration;
  }

  // The python script that executes queries using the Simba
  // ODBC driver, creates two metadata files for each query file.
  // A <query_file>.type file contains a list of the data types
  // for the columns returned by the query.
  // A <query_file>.label file contains a list of the column names
  // returned by the query.
  // These files are scanned, and the types and column names are
  // loaded into columnTypes and columnLabels, so they can be passed
  // to TestVerifier.
  private void getColumnNamesAndTypes(String queryFilename) throws IOException {
    columnTypes = Lists.newArrayList();
    int index = queryFilename.lastIndexOf('.');
    String filename = queryFilename.substring(0, index);
    String typeFilename = filename + ".type";
    // FIX: readers were never closed; use try-with-resources.
    try (BufferedReader reader = new BufferedReader(new FileReader(new File(typeFilename)))) {
      String line;
      while ((line = reader.readLine()) != null) {
        int value = Integer.parseInt(line);
        columnTypes.add(value);
      }
    }
    String labelFilename = filename + ".label";
    try (BufferedReader reader = new BufferedReader(new FileReader(new File(labelFilename)))) {
      String line;
      while ((line = reader.readLine()) != null) {
        columnLabels.add(line);
      }
    }
  }
}
package org.jabref.gui;

import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Container;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.event.ActionEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.ActionMap;
import javax.swing.DefaultCellEditor;
import javax.swing.InputMap;
import javax.swing.JComponent;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.JToolBar;
import javax.swing.LayoutFocusTraversalPolicy;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableColumnModel;
import javax.swing.undo.CompoundEdit;

import org.jabref.Globals;
import org.jabref.gui.actions.Actions;
import org.jabref.gui.help.HelpAction;
import org.jabref.gui.icon.IconTheme;
import org.jabref.gui.keyboard.KeyBinding;
import org.jabref.gui.undo.UndoableInsertString;
import org.jabref.gui.undo.UndoableRemoveString;
import org.jabref.gui.undo.UndoableStringChange;
import org.jabref.gui.util.WindowLocation;
import org.jabref.logic.bibtex.InvalidFieldValueException;
import org.jabref.logic.bibtex.LatexFieldFormatter;
import org.jabref.logic.bibtex.comparator.BibtexStringComparator;
import org.jabref.logic.help.HelpFile;
import org.jabref.logic.l10n.Localization;
import org.jabref.model.database.BibDatabase;
import org.jabref.model.database.KeyCollisionException;
import org.jabref.model.entry.BibtexString;
import org.jabref.preferences.JabRefPreferences;

/**
 * Dialog for viewing and editing the BibTeX {@code @string} definitions of a
 * library. Presents a two-column (label / content) table backed by
 * {@link StringTableModel}, with toolbar and keyboard actions for adding,
 * removing, saving, undoing and redoing string edits. All edits are recorded
 * on the owning {@link BasePanel}'s undo manager and mark the database as
 * changed.
 */
class StringDialog extends JabRefDialog {

    // Localized base window title; the library file name is appended in the constructor.
    private static final String STRINGS_TITLE = Localization.lang("Strings for library");

    // A reference to the entry this object works on.
    private final BibDatabase base;
    // Panel owning the database; used for undo bookkeeping and running commands.
    private final BasePanel panel;
    // Table displaying the sorted strings.
    private final StringTable table;
    private final HelpAction helpAction;
    private final SaveDatabaseAction saveAction = new SaveDatabaseAction(this);
    // The action concerned with closing the window.
    private final CloseAction closeAction = new CloseAction();
    // Strings of `base`, re-sorted on every change via sortStrings().
    private List<BibtexString> strings;

    /**
     * Builds the dialog, wires key bindings and toolbar actions, and restores
     * the stored window position/size.
     *
     * @param frame the application frame (currently unused here — TODO confirm
     *              whether it can be dropped from the signature)
     * @param panel panel whose database is being edited
     * @param base  the database containing the strings
     */
    public StringDialog(JabRefFrame frame, BasePanel panel, BibDatabase base) {
        super(null, StringDialog.class);
        this.panel = panel;
        this.base = base;
        sortStrings();
        helpAction = new HelpAction(Localization.lang("Help"), HelpFile.STRING_EDITOR);
        // Closing via the window decoration goes through the same path as the
        // Close action, so stringsClosing() is always notified.
        addWindowListener(new WindowAdapter() {

            @Override
            public void windowClosing(WindowEvent e) {
                closeAction.actionPerformed(null);
            }
        });
        // We replace the default FocusTraversalPolicy with a subclass
        // that only allows the StringTable to gain keyboard focus.
        setFocusTraversalPolicy(new LayoutFocusTraversalPolicy() {

            @Override
            protected boolean accept(Component c) {
                return super.accept(c) && (c instanceof StringTable);
            }
        });
        JPanel pan = new JPanel();
        GridBagLayout gbl = new GridBagLayout();
        pan.setLayout(gbl);
        GridBagConstraints con = new GridBagConstraints();
        con.fill = GridBagConstraints.BOTH;
        con.weighty = 1;
        con.weightx = 1;
        StringTableModel stm = new StringTableModel(this, base);
        table = new StringTable(stm);
        // Pre-select the first row when the database already has strings.
        if (!base.hasNoStrings()) {
            table.setRowSelectionInterval(0, 0);
        }
        gbl.setConstraints(table.getPane(), con);
        pan.add(table.getPane());

        // Toolbar doubles as the holder for window-wide key bindings: the
        // InputMap/ActionMap pairs below map key strokes to the named actions.
        JToolBar tlb = new OSXCompatibleToolbar();
        InputMap im = tlb.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW);
        ActionMap am = tlb.getActionMap();
        im.put(Globals.getKeyPrefs().getKey(KeyBinding.STRING_DIALOG_ADD_STRING), "add");
        NewStringAction newStringAction = new NewStringAction(this);
        am.put("add", newStringAction);
        im.put(Globals.getKeyPrefs().getKey(KeyBinding.STRING_DIALOG_REMOVE_STRING), "remove");
        RemoveStringAction removeStringAction = new RemoveStringAction(this);
        am.put("remove", removeStringAction);
        im.put(Globals.getKeyPrefs().getKey(KeyBinding.SAVE_DATABASE), "save");
        am.put("save", saveAction);
        im.put(Globals.getKeyPrefs().getKey(KeyBinding.CLOSE), "close");
        am.put("close", closeAction);
        im.put(Globals.getKeyPrefs().getKey(KeyBinding.HELP), "help");
        am.put("help", helpAction);
        im.put(Globals.getKeyPrefs().getKey(KeyBinding.UNDO), "undo");
        UndoAction undoAction = new UndoAction();
        am.put("undo", undoAction);
        im.put(Globals.getKeyPrefs().getKey(KeyBinding.REDO), "redo");
        RedoAction redoAction = new RedoAction();
        am.put("redo", redoAction);
        tlb.add(newStringAction);
        tlb.add(removeStringAction);
        tlb.addSeparator();
        tlb.add(helpAction);
        Container conPane = getContentPane();
        conPane.add(tlb, BorderLayout.NORTH);
        conPane.add(pan, BorderLayout.CENTER);
        // Title shows the library file name, or the "untitled" placeholder.
        setTitle(STRINGS_TITLE + ": " + panel.getBibDatabaseContext().getDatabaseFile().map(File::getName).orElse(GUIGlobals.UNTITLED_TITLE));
        // Restore the previously stored window location and size from preferences.
        WindowLocation pw = new WindowLocation(this, JabRefPreferences.STRINGS_POS_X, JabRefPreferences.STRINGS_POS_Y, JabRefPreferences.STRINGS_SIZE_X, JabRefPreferences.STRINGS_SIZE_Y);
        pw.displayWindowAtStoredLocation();
    }

    /**
     * Returns true if {@code name} parses as an int.
     */
    private static boolean isNumber(String name) {
        // A pure integer number cannot be used as a string label,
        // since Bibtex will read it as a number.
        try {
            Integer.parseInt(name);
            return true;
        } catch (NumberFormatException ex) {
            return false;
        }
    }

    /**
     * Re-reads all strings from the database into {@link #strings}, sorted by
     * label via {@link BibtexStringComparator}.
     */
    private void sortStrings() {
        // Rebuild our sorted set of strings:
        strings = new ArrayList<>();
        for (String s : base.getStringKeySet()) {
            strings.add(base.getString(s));
        }
        Collections.sort(strings, new BibtexStringComparator(false));
    }

    /**
     * Re-sorts the strings and refreshes the table display, clearing any
     * current selection.
     */
    public void refreshTable() {
        sortStrings();
        table.revalidate();
        table.clearSelection();
        table.repaint();
    }

    /** Triggers the panel's save command for the whole library. */
    public void saveDatabase() {
        panel.runCommand(Actions.SAVE);
    }

    /**
     * Commits any in-progress cell edit so that pending edits are not left
     * hanging when rows are removed or the table is refreshed.
     */
    public void assureNotEditing() {
        if (table.isEditing()) {
            int col = table.getEditingColumn();
            int row = table.getEditingRow();
            table.getCellEditor(row, col).stopCellEditing();
        }
    }

    /** Toolbar/keyboard action that saves the library via the parent dialog. */
    static class SaveDatabaseAction extends AbstractAction {

        private final StringDialog parent;

        public SaveDatabaseAction(StringDialog parent) {
            super("Save library", IconTheme.JabRefIcons.SAVE.getIcon());
            putValue(Action.SHORT_DESCRIPTION, Localization.lang("Save library"));
            this.parent = parent;
        }

        @Override
        public void actionPerformed(ActionEvent e) {
            parent.saveDatabase();
        }
    }

    /**
     * Table showing the string labels and contents; edits start on double
     * click and the table is wrapped in its own scroll pane.
     */
    class StringTable extends JTable {

        private final JScrollPane sp = new JScrollPane(this);

        public StringTable(StringTableModel stm) {
            super(stm);
            setShowVerticalLines(true);
            setShowHorizontalLines(true);
            setColumnSelectionAllowed(true);
            // Require a double click to start editing a cell.
            DefaultCellEditor dce = new DefaultCellEditor(new JTextField());
            dce.setClickCountToStart(2);
            setDefaultEditor(String.class, dce);
            TableColumnModel cm = getColumnModel();
            cm.getColumn(0).setPreferredWidth(800);
            cm.getColumn(1).setPreferredWidth(2000);
            // Close/help must also work while the table has keyboard focus.
            getInputMap().put(Globals.getKeyPrefs().getKey(KeyBinding.CLOSE), "close");
            getActionMap().put("close", closeAction);
            getInputMap().put(Globals.getKeyPrefs().getKey(KeyBinding.HELP), "help");
            getActionMap().put("help", helpAction);
        }

        /** Returns the scroll pane wrapping this table for adding to a container. */
        public JComponent getPane() {
            return sp;
        }
    }

    /**
     * Two-column model over {@link StringDialog#strings}: column 0 is the
     * string label, column 1 its content. {@code setValueAt} validates edits,
     * shows an error dialog on rejection, and records accepted edits on the
     * undo manager.
     */
    class StringTableModel extends AbstractTableModel {

        private final BibDatabase tbase;
        private final StringDialog parent;

        public StringTableModel(StringDialog parent, BibDatabase base) {
            this.parent = parent;
            this.tbase = base;
        }

        @Override
        public Object getValueAt(int row, int col) {
            return col == 0 ? strings.get(row).getName() : strings.get(row).getContent();
        }

        @Override
        public void setValueAt(Object value, int row, int col) {
            if (col == 0) {
                // Change name of string.
                if (!value.equals(strings.get(row).getName())) {
                    // Rejected labels: duplicates, labels with spaces or '#',
                    // and pure numbers (BibTeX would read those as numbers).
                    if (tbase.hasStringLabel((String) value)) {
                        JOptionPane.showMessageDialog(parent, Localization.lang("A string with that label already exists"), Localization.lang("Label"), JOptionPane.ERROR_MESSAGE);
                    } else if (((String) value).contains(" ")) {
                        JOptionPane.showMessageDialog(parent, Localization.lang("The label of the string cannot contain spaces."), Localization.lang("Label"), JOptionPane.ERROR_MESSAGE);
                    } else if (((String) value).contains("#")) {
                        JOptionPane.showMessageDialog(parent, Localization.lang("The label of the string cannot contain the '#' character."), Localization.lang("Label"), JOptionPane.ERROR_MESSAGE);
                    } else if (isNumber((String) value)) {
                        JOptionPane.showMessageDialog(parent, Localization.lang("The label of the string cannot be a number."), Localization.lang("Label"), JOptionPane.ERROR_MESSAGE);
                    } else {
                        // Store undo information.
                        BibtexString subject = strings.get(row);
                        panel.getUndoManager().addEdit(
                                new UndoableStringChange(panel, subject, true, subject.getName(), (String) value));
                        subject.setName((String) value);
                        panel.markBaseChanged();
                        refreshTable();
                    }
                }
            } else {
                // Change content of string.
                BibtexString subject = strings.get(row);
                if (!value.equals(subject.getContent())) {
                    // Validate the new content by formatting it; reject the
                    // edit silently if the field value is invalid.
                    try {
                        new LatexFieldFormatter(Globals.prefs.getLatexFieldFormatterPreferences())
                                .format((String) value, "__dummy");
                    } catch (InvalidFieldValueException ex) {
                        return;
                    }
                    // Store undo information.
                    panel.getUndoManager().addEdit(
                            new UndoableStringChange(panel, subject, false, subject.getContent(), (String) value));
                    subject.setContent((String) value);
                    panel.markBaseChanged();
                }
            }
        }

        @Override
        public int getColumnCount() {
            return 2;
        }

        @Override
        public int getRowCount() {
            return strings.size();
        }

        @Override
        public String getColumnName(int col) {
            return col == 0 ? Localization.lang("Label") : Localization.lang("Content");
        }

        @Override
        public boolean isCellEditable(int row, int col) {
            return true;
        }
    }

    /** Notifies the panel that the dialog is closing, then disposes it. */
    class CloseAction extends AbstractAction {

        public CloseAction() {
            super("Close window");
            putValue(Action.SHORT_DESCRIPTION, Localization.lang("Close dialog"));
        }

        @Override
        public void actionPerformed(ActionEvent e) {
            panel.stringsClosing();
            dispose();
        }
    }

    /**
     * Prompts for a new string label, validates it (not null, not a number,
     * no '#', no spaces), then inserts an empty string under that label with
     * undo support.
     */
    class NewStringAction extends AbstractAction {

        private final StringDialog parent;

        public NewStringAction(StringDialog parent) {
            super("New string", IconTheme.JabRefIcons.ADD.getIcon());
            putValue(Action.SHORT_DESCRIPTION, Localization.lang("New string"));
            this.parent = parent;
        }

        @Override
        public void actionPerformed(ActionEvent e) {
            String name = JOptionPane.showInputDialog(parent, Localization.lang("Please enter the string's label"));
            if (name == null) {
                // User cancelled the input dialog.
                return;
            }
            if (isNumber(name)) {
                JOptionPane.showMessageDialog(parent, Localization.lang("The label of the string cannot be a number."), Localization.lang("Label"), JOptionPane.ERROR_MESSAGE);
                return;
            }
            if (name.contains("#")) {
                JOptionPane.showMessageDialog(parent, Localization.lang("The label of the string cannot contain the '#' character."), Localization.lang("Label"), JOptionPane.ERROR_MESSAGE);
                return;
            }
            if (name.contains(" ")) {
                JOptionPane.showMessageDialog(parent, Localization.lang("The label of the string cannot contain spaces."), Localization.lang("Label"), JOptionPane.ERROR_MESSAGE);
                return;
            }
            try {
                BibtexString bs = new BibtexString(name, "");
                // Store undo information:
                panel.getUndoManager().addEdit(new UndoableInsertString(panel, panel.getDatabase(), bs));
                base.addString(bs);
                refreshTable();
                panel.markBaseChanged();
            } catch (KeyCollisionException ex) {
                // Duplicate label detected by the database itself.
                JOptionPane.showMessageDialog(parent, Localization.lang("A string with that label already exists"), Localization.lang("Label"), JOptionPane.ERROR_MESSAGE);
            }
        }
    }

    /**
     * Removes the selected strings after user confirmation, grouping all
     * removals into one {@link CompoundEdit} so they undo as a single step.
     */
    class RemoveStringAction extends AbstractAction {

        private final StringDialog parent;

        public RemoveStringAction(StringDialog parent) {
            super("Remove selected strings", IconTheme.JabRefIcons.REMOVE.getIcon());
            putValue(Action.SHORT_DESCRIPTION, Localization.lang("Remove selected strings"));
            this.parent = parent;
        }

        @Override
        public void actionPerformed(ActionEvent e) {
            int[] sel = table.getSelectedRows();
            if (sel.length > 0) {
                // Make sure no cell is being edited, as caused by the
                // keystroke. This makes the content hang on the screen.
                assureNotEditing();
                String msg = (sel.length > 1 ? Localization.lang("Really delete the %0 selected entries?", Integer.toString(sel.length)) : Localization.lang("Really delete the selected entry?"));
                int answer = JOptionPane.showConfirmDialog(parent, msg, Localization.lang("Delete strings"), JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE);
                if (answer == JOptionPane.YES_OPTION) {
                    CompoundEdit ce = new CompoundEdit();
                    for (int i = sel.length - 1; i >= 0; i--) {
                        // Delete the strings backwards to avoid moving indexes.
                        BibtexString subject = strings.get(sel[i]);
                        // Store undo information:
                        ce.addEdit(new UndoableRemoveString(panel, base, subject));
                        base.removeString(subject.getId());
                    }
                    ce.end();
                    panel.getUndoManager().addEdit(ce);
                    refreshTable();
                    // Keep a sensible selection if strings remain.
                    if (!base.hasNoStrings()) {
                        table.setRowSelectionInterval(0, 0);
                    }
                }
            }
        }
    }

    /** Delegates to the panel's undo command. */
    class UndoAction extends AbstractAction {

        public UndoAction() {
            super("Undo", IconTheme.JabRefIcons.UNDO.getIcon());
            putValue(Action.SHORT_DESCRIPTION, Localization.lang("Undo"));
        }

        @Override
        public void actionPerformed(ActionEvent e) {
            panel.runCommand(Actions.UNDO);
        }
    }

    /** Delegates to the panel's redo command. */
    class RedoAction extends AbstractAction {

        public RedoAction() {
            super("Redo", IconTheme.JabRefIcons.REDO.getIcon());
            putValue(Action.SHORT_DESCRIPTION, Localization.lang("Redo"));
        }

        @Override
        public void actionPerformed(ActionEvent e) {
            panel.runCommand(Actions.REDO);
        }
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.trino.plugin.phoenix5; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.trino.plugin.jdbc.BaseJdbcClient; import io.trino.plugin.jdbc.ColumnMapping; import io.trino.plugin.jdbc.ConnectionFactory; import io.trino.plugin.jdbc.JdbcColumnHandle; import io.trino.plugin.jdbc.JdbcOutputTableHandle; import io.trino.plugin.jdbc.JdbcSortItem; import io.trino.plugin.jdbc.JdbcSplit; import io.trino.plugin.jdbc.JdbcTableHandle; import io.trino.plugin.jdbc.JdbcTypeHandle; import io.trino.plugin.jdbc.ObjectReadFunction; import io.trino.plugin.jdbc.ObjectWriteFunction; import io.trino.plugin.jdbc.PreparedQuery; import io.trino.plugin.jdbc.QueryBuilder; import io.trino.plugin.jdbc.WriteFunction; import io.trino.plugin.jdbc.WriteMapping; import io.trino.plugin.jdbc.mapping.IdentifierMapping; import io.trino.spi.TrinoException; import io.trino.spi.block.Block; import io.trino.spi.connector.ColumnMetadata; import io.trino.spi.connector.ConnectorSession; import io.trino.spi.connector.ConnectorTableMetadata; import io.trino.spi.connector.SchemaNotFoundException; import io.trino.spi.connector.SchemaTableName; import io.trino.spi.security.ConnectorIdentity; import io.trino.spi.type.ArrayType; import io.trino.spi.type.CharType; import io.trino.spi.type.DecimalType; import io.trino.spi.type.Decimals; import io.trino.spi.type.Type; import 
io.trino.spi.type.VarbinaryType; import io.trino.spi.type.VarcharType; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.util.Bytes; import org.apache.phoenix.compile.QueryPlan; import org.apache.phoenix.compile.StatementContext; import org.apache.phoenix.exception.SQLExceptionCode; import org.apache.phoenix.iterate.ConcatResultIterator; import org.apache.phoenix.iterate.LookAheadResultIterator; import org.apache.phoenix.iterate.MapReduceParallelScanGrouper; import org.apache.phoenix.iterate.PeekingResultIterator; import org.apache.phoenix.iterate.ResultIterator; import org.apache.phoenix.iterate.RoundRobinResultIterator; import org.apache.phoenix.iterate.SequenceResultIterator; import org.apache.phoenix.iterate.TableResultIterator; import org.apache.phoenix.jdbc.DelegatePreparedStatement; import org.apache.phoenix.jdbc.PhoenixConnection; import org.apache.phoenix.jdbc.PhoenixPreparedStatement; import org.apache.phoenix.jdbc.PhoenixResultSet; import org.apache.phoenix.mapreduce.PhoenixInputSplit; import org.apache.phoenix.monitoring.ScanMetricsHolder; import org.apache.phoenix.query.ConnectionQueryServices; import org.apache.phoenix.query.HBaseFactoryProvider; import org.apache.phoenix.query.QueryConstants; import org.apache.phoenix.schema.PColumn; import org.apache.phoenix.schema.PName; import org.apache.phoenix.schema.PTable; import org.apache.phoenix.schema.TableProperty; import org.apache.phoenix.schema.types.PDataType; import org.apache.phoenix.util.SchemaUtil; import javax.inject.Inject; import 
java.io.IOException; import java.sql.Array; import java.sql.Connection; import java.sql.JDBCType; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Types; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.StringJoiner; import java.util.function.BiFunction; import static com.google.common.base.MoreObjects.firstNonNull; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Verify.verify; import static io.trino.plugin.jdbc.StandardColumnMappings.bigintColumnMapping; import static io.trino.plugin.jdbc.StandardColumnMappings.bigintWriteFunction; import static io.trino.plugin.jdbc.StandardColumnMappings.booleanColumnMapping; import static io.trino.plugin.jdbc.StandardColumnMappings.booleanWriteFunction; import static io.trino.plugin.jdbc.StandardColumnMappings.charWriteFunction; import static io.trino.plugin.jdbc.StandardColumnMappings.dateColumnMapping; import static io.trino.plugin.jdbc.StandardColumnMappings.dateWriteFunction; import static io.trino.plugin.jdbc.StandardColumnMappings.decimalColumnMapping; import static io.trino.plugin.jdbc.StandardColumnMappings.defaultCharColumnMapping; import static io.trino.plugin.jdbc.StandardColumnMappings.defaultVarcharColumnMapping; import static io.trino.plugin.jdbc.StandardColumnMappings.doubleColumnMapping; import static io.trino.plugin.jdbc.StandardColumnMappings.doubleWriteFunction; import static io.trino.plugin.jdbc.StandardColumnMappings.integerColumnMapping; import static io.trino.plugin.jdbc.StandardColumnMappings.integerWriteFunction; import static io.trino.plugin.jdbc.StandardColumnMappings.longDecimalWriteFunction; import static io.trino.plugin.jdbc.StandardColumnMappings.realColumnMapping; import static io.trino.plugin.jdbc.StandardColumnMappings.realWriteFunction; 
import static io.trino.plugin.jdbc.StandardColumnMappings.shortDecimalWriteFunction; import static io.trino.plugin.jdbc.StandardColumnMappings.smallintColumnMapping; import static io.trino.plugin.jdbc.StandardColumnMappings.smallintWriteFunction; import static io.trino.plugin.jdbc.StandardColumnMappings.timeWriteFunctionUsingSqlTime; import static io.trino.plugin.jdbc.StandardColumnMappings.tinyintColumnMapping; import static io.trino.plugin.jdbc.StandardColumnMappings.tinyintWriteFunction; import static io.trino.plugin.jdbc.StandardColumnMappings.varbinaryColumnMapping; import static io.trino.plugin.jdbc.StandardColumnMappings.varbinaryWriteFunction; import static io.trino.plugin.jdbc.StandardColumnMappings.varcharColumnMapping; import static io.trino.plugin.jdbc.StandardColumnMappings.varcharWriteFunction; import static io.trino.plugin.jdbc.TypeHandlingJdbcSessionProperties.getUnsupportedTypeHandling; import static io.trino.plugin.jdbc.UnsupportedTypeHandling.CONVERT_TO_VARCHAR; import static io.trino.plugin.phoenix5.MetadataUtil.getEscapedTableName; import static io.trino.plugin.phoenix5.MetadataUtil.toPhoenixSchemaName; import static io.trino.plugin.phoenix5.PhoenixClientModule.getConnectionProperties; import static io.trino.plugin.phoenix5.PhoenixColumnProperties.isPrimaryKey; import static io.trino.plugin.phoenix5.PhoenixErrorCode.PHOENIX_METADATA_ERROR; import static io.trino.plugin.phoenix5.PhoenixErrorCode.PHOENIX_QUERY_ERROR; import static io.trino.plugin.phoenix5.PhoenixMetadata.DEFAULT_SCHEMA; import static io.trino.plugin.phoenix5.TypeUtils.getArrayElementPhoenixTypeName; import static io.trino.plugin.phoenix5.TypeUtils.getJdbcObjectArray; import static io.trino.plugin.phoenix5.TypeUtils.jdbcObjectArrayToBlock; import static io.trino.plugin.phoenix5.TypeUtils.toBoxedArray; import static io.trino.spi.StandardErrorCode.ALREADY_EXISTS; import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED; import static io.trino.spi.type.BigintType.BIGINT; import 
static io.trino.spi.type.BooleanType.BOOLEAN; import static io.trino.spi.type.DateType.DATE; import static io.trino.spi.type.DecimalType.createDecimalType; import static io.trino.spi.type.DoubleType.DOUBLE; import static io.trino.spi.type.IntegerType.INTEGER; import static io.trino.spi.type.RealType.REAL; import static io.trino.spi.type.SmallintType.SMALLINT; import static io.trino.spi.type.TimeType.TIME; import static io.trino.spi.type.TimeWithTimeZoneType.TIME_WITH_TIME_ZONE; import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MILLIS; import static io.trino.spi.type.TinyintType.TINYINT; import static io.trino.spi.type.VarcharType.createUnboundedVarcharType; import static java.lang.Math.max; import static java.lang.String.format; import static java.lang.String.join; import static java.math.RoundingMode.UNNECESSARY; import static java.sql.Types.ARRAY; import static java.sql.Types.LONGNVARCHAR; import static java.sql.Types.LONGVARCHAR; import static java.sql.Types.NVARCHAR; import static java.sql.Types.TIMESTAMP; import static java.sql.Types.TIMESTAMP_WITH_TIMEZONE; import static java.sql.Types.TIME_WITH_TIMEZONE; import static java.sql.Types.VARCHAR; import static java.util.Locale.ENGLISH; import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.toSet; import static org.apache.hadoop.hbase.HConstants.FOREVER; import static org.apache.phoenix.coprocessor.BaseScannerRegionObserver.SKIP_REGION_BOUNDARY_CHECK; import static org.apache.phoenix.util.PhoenixRuntime.getTable; import static org.apache.phoenix.util.SchemaUtil.ESCAPE_CHARACTER; import static org.apache.phoenix.util.SchemaUtil.getEscapedArgument; public class PhoenixClient extends BaseJdbcClient { private static final String ROWKEY = "ROWKEY"; private final Configuration configuration; @Inject public PhoenixClient(PhoenixConfig config, ConnectionFactory connectionFactory, IdentifierMapping identifierMapping) throws SQLException { super( ESCAPE_CHARACTER, 
connectionFactory, ImmutableSet.of(), identifierMapping); this.configuration = new Configuration(false); getConnectionProperties(config).forEach((k, v) -> configuration.set((String) k, (String) v)); } public Connection getConnection(ConnectorSession session) throws SQLException { return connectionFactory.openConnection(session); } public org.apache.hadoop.hbase.client.Connection getHConnection() throws IOException { return HBaseFactoryProvider.getHConnectionFactory().createConnection(configuration); } @Override public void execute(ConnectorSession session, String statement) { super.execute(session, statement); } @Override public Collection<String> listSchemas(Connection connection) { try (ResultSet resultSet = connection.getMetaData().getSchemas()) { ImmutableSet.Builder<String> schemaNames = ImmutableSet.builder(); schemaNames.add(DEFAULT_SCHEMA); while (resultSet.next()) { String schemaName = getTableSchemaName(resultSet); // skip internal schemas if (filterSchema(schemaName)) { schemaNames.add(schemaName); } } return schemaNames.build(); } catch (SQLException e) { throw new TrinoException(PHOENIX_METADATA_ERROR, e); } } @Override public PreparedStatement buildSql(ConnectorSession session, Connection connection, JdbcSplit split, JdbcTableHandle table, List<JdbcColumnHandle> columnHandles) throws SQLException { PreparedStatement query = prepareStatement( session, connection, table, columnHandles, Optional.of(split)); QueryPlan queryPlan = getQueryPlan((PhoenixPreparedStatement) query); ResultSet resultSet = getResultSet(((PhoenixSplit) split).getPhoenixInputSplit(), queryPlan); return new DelegatePreparedStatement(query) { @Override public ResultSet executeQuery() { return resultSet; } }; } public PreparedStatement prepareStatement( ConnectorSession session, Connection connection, JdbcTableHandle table, List<JdbcColumnHandle> columns, Optional<JdbcSplit> split) throws SQLException { PreparedQuery preparedQuery = prepareQuery( session, connection, table, 
Optional.empty(), columns, ImmutableMap.of(), split); return new QueryBuilder(this).prepareStatement(session, connection, preparedQuery); } @Override public boolean supportsTopN(ConnectorSession session, JdbcTableHandle handle, List<JdbcSortItem> sortOrder) { return true; } @Override protected Optional<TopNFunction> topNFunction() { return Optional.of(TopNFunction.sqlStandard(this::quoted)); } @Override public boolean isTopNGuaranteed(ConnectorSession session) { // There are multiple splits and TopN is not guaranteed across them. return false; } @Override protected Optional<BiFunction<String, Long, String>> limitFunction() { return Optional.of((sql, limit) -> sql + " LIMIT " + limit); } @Override public boolean isLimitGuaranteed(ConnectorSession session) { return false; } @Override public String buildInsertSql(JdbcOutputTableHandle handle, List<WriteFunction> columnWriters) { PhoenixOutputTableHandle outputHandle = (PhoenixOutputTableHandle) handle; String params = columnWriters.stream() .map(WriteFunction::getBindExpression) .collect(joining(",")); String columns = handle.getColumnNames().stream() .map(SchemaUtil::getEscapedArgument) .collect(joining(",")); if (outputHandle.rowkeyColumn().isPresent()) { String nextId = format( "NEXT VALUE FOR %s, ", quoted(null, handle.getSchemaName(), handle.getTableName() + "_sequence")); params = nextId + params; columns = outputHandle.rowkeyColumn().get() + ", " + columns; } return format( "UPSERT INTO %s (%s) VALUES (%s)", quoted(null, handle.getSchemaName(), handle.getTableName()), columns, params); } @Override public ResultSet getTables(Connection connection, Optional<String> schemaName, Optional<String> tableName) throws SQLException { return super.getTables(connection, schemaName.map(MetadataUtil::toPhoenixSchemaName), tableName); } @Override protected String getTableSchemaName(ResultSet resultSet) throws SQLException { return firstNonNull(resultSet.getString("TABLE_SCHEM"), DEFAULT_SCHEMA); } @Override public 
Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle) { Optional<ColumnMapping> mapping = getForcedMappingToVarchar(typeHandle); if (mapping.isPresent()) { return mapping; } switch (typeHandle.getJdbcType()) { case Types.BOOLEAN: return Optional.of(booleanColumnMapping()); case Types.TINYINT: return Optional.of(tinyintColumnMapping()); case Types.SMALLINT: return Optional.of(smallintColumnMapping()); case Types.INTEGER: return Optional.of(integerColumnMapping()); case Types.BIGINT: return Optional.of(bigintColumnMapping()); case Types.FLOAT: return Optional.of(realColumnMapping()); case Types.DOUBLE: return Optional.of(doubleColumnMapping()); case Types.DECIMAL: int precision = typeHandle.getRequiredColumnSize(); int decimalDigits = typeHandle.getDecimalDigits().orElseThrow(() -> new IllegalStateException("decimal digits not present")); // TODO does phoenix support negative scale? precision = precision + max(-decimalDigits, 0); // Map decimal(p, -s) (negative scale) to decimal(p+s, 0). 
if (precision > Decimals.MAX_PRECISION) { break; } return Optional.of(decimalColumnMapping(createDecimalType(precision, max(decimalDigits, 0)), UNNECESSARY)); case Types.CHAR: return Optional.of(defaultCharColumnMapping(typeHandle.getRequiredColumnSize(), true)); case VARCHAR: case NVARCHAR: case LONGVARCHAR: case LONGNVARCHAR: if (typeHandle.getColumnSize().isEmpty()) { return Optional.of(varcharColumnMapping(createUnboundedVarcharType(), true)); } return Optional.of(defaultVarcharColumnMapping(typeHandle.getRequiredColumnSize(), true)); case Types.VARBINARY: return Optional.of(varbinaryColumnMapping()); case Types.DATE: return Optional.of(dateColumnMapping()); // TODO add support for TIMESTAMP after Phoenix adds support for LocalDateTime case TIMESTAMP: case TIME_WITH_TIMEZONE: case TIMESTAMP_WITH_TIMEZONE: if (getUnsupportedTypeHandling(session) == CONVERT_TO_VARCHAR) { return mapToUnboundedVarchar(typeHandle); } return Optional.empty(); case ARRAY: JdbcTypeHandle elementTypeHandle = getArrayElementTypeHandle(typeHandle); if (elementTypeHandle.getJdbcType() == Types.VARBINARY) { return Optional.empty(); } return toColumnMapping(session, connection, elementTypeHandle) .map(elementMapping -> { ArrayType trinoArrayType = new ArrayType(elementMapping.getType()); String jdbcTypeName = elementTypeHandle.getJdbcTypeName() .orElseThrow(() -> new TrinoException( PHOENIX_METADATA_ERROR, "Type name is missing for jdbc type: " + JDBCType.valueOf(elementTypeHandle.getJdbcType()))); return arrayColumnMapping(session, trinoArrayType, jdbcTypeName); }); } if (getUnsupportedTypeHandling(session) == CONVERT_TO_VARCHAR) { return mapToUnboundedVarchar(typeHandle); } return Optional.empty(); } @Override public WriteMapping toWriteMapping(ConnectorSession session, Type type) { if (type == BOOLEAN) { return WriteMapping.booleanMapping("boolean", booleanWriteFunction()); } if (type == TINYINT) { return WriteMapping.longMapping("tinyint", tinyintWriteFunction()); } if (type == SMALLINT) 
{ return WriteMapping.longMapping("smallint", smallintWriteFunction()); } if (type == INTEGER) { return WriteMapping.longMapping("integer", integerWriteFunction()); } if (type == BIGINT) { return WriteMapping.longMapping("bigint", bigintWriteFunction()); } if (type == REAL) { return WriteMapping.longMapping("float", realWriteFunction()); } if (type == DOUBLE) { return WriteMapping.doubleMapping("double", doubleWriteFunction()); } if (type instanceof DecimalType) { DecimalType decimalType = (DecimalType) type; String dataType = format("decimal(%s, %s)", decimalType.getPrecision(), decimalType.getScale()); if (decimalType.isShort()) { return WriteMapping.longMapping(dataType, shortDecimalWriteFunction(decimalType)); } return WriteMapping.sliceMapping(dataType, longDecimalWriteFunction(decimalType)); } if (type instanceof CharType) { return WriteMapping.sliceMapping("char(" + ((CharType) type).getLength() + ")", charWriteFunction()); } if (type instanceof VarcharType) { VarcharType varcharType = (VarcharType) type; String dataType; if (varcharType.isUnbounded()) { dataType = "varchar"; } else { dataType = "varchar(" + varcharType.getBoundedLength() + ")"; } return WriteMapping.sliceMapping(dataType, varcharWriteFunction()); } if (type instanceof VarbinaryType) { return WriteMapping.sliceMapping("varbinary", varbinaryWriteFunction()); } if (type == DATE) { return WriteMapping.longMapping("date", dateWriteFunction()); } if (TIME.equals(type)) { return WriteMapping.longMapping("time", timeWriteFunctionUsingSqlTime()); } // Phoenix doesn't support _WITH_TIME_ZONE if (TIME_WITH_TIME_ZONE.equals(type) || TIMESTAMP_TZ_MILLIS.equals(type)) { throw new TrinoException(NOT_SUPPORTED, "Unsupported column type: " + type.getDisplayName()); } if (type instanceof ArrayType) { Type elementType = ((ArrayType) type).getElementType(); String elementDataType = toWriteMapping(session, elementType).getDataType().toUpperCase(ENGLISH); String elementWriteName = 
getArrayElementPhoenixTypeName(session, this, elementType); return WriteMapping.objectMapping(elementDataType + " ARRAY", arrayWriteFunction(session, elementType, elementWriteName)); } return legacyToWriteMapping(session, type); } @Override public JdbcOutputTableHandle beginCreateTable(ConnectorSession session, ConnectorTableMetadata tableMetadata) { SchemaTableName schemaTableName = tableMetadata.getTable(); String schema = schemaTableName.getSchemaName(); String table = schemaTableName.getTableName(); if (!getSchemaNames(session).contains(schema)) { throw new SchemaNotFoundException(schema); } try (Connection connection = connectionFactory.openConnection(session)) { ConnectorIdentity identity = session.getIdentity(); schema = getIdentifierMapping().toRemoteSchemaName(identity, connection, schema); table = getIdentifierMapping().toRemoteTableName(identity, connection, schema, table); schema = toPhoenixSchemaName(schema); LinkedList<ColumnMetadata> tableColumns = new LinkedList<>(tableMetadata.getColumns()); Map<String, Object> tableProperties = tableMetadata.getProperties(); Optional<Boolean> immutableRows = PhoenixTableProperties.getImmutableRows(tableProperties); String immutable = immutableRows.isPresent() && immutableRows.get() ? 
"IMMUTABLE" : ""; ImmutableList.Builder<String> columnNames = ImmutableList.builder(); ImmutableList.Builder<Type> columnTypes = ImmutableList.builder(); ImmutableList.Builder<String> columnList = ImmutableList.builder(); Set<ColumnMetadata> rowkeyColumns = tableColumns.stream().filter(col -> isPrimaryKey(col, tableProperties)).collect(toSet()); ImmutableList.Builder<String> pkNames = ImmutableList.builder(); Optional<String> rowkeyColumn = Optional.empty(); if (rowkeyColumns.isEmpty()) { // Add a rowkey when not specified in DDL columnList.add(ROWKEY + " bigint not null"); pkNames.add(ROWKEY); execute(session, format("CREATE SEQUENCE %s", getEscapedTableName(schema, table + "_sequence"))); rowkeyColumn = Optional.of(ROWKEY); } for (ColumnMetadata column : tableColumns) { String columnName = getIdentifierMapping().toRemoteColumnName(connection, column.getName()); columnNames.add(columnName); columnTypes.add(column.getType()); String typeStatement = toWriteMapping(session, column.getType()).getDataType(); if (rowkeyColumns.contains(column)) { typeStatement += " not null"; pkNames.add(columnName); } columnList.add(format("%s %s", getEscapedArgument(columnName), typeStatement)); } ImmutableList.Builder<String> tableOptions = ImmutableList.builder(); PhoenixTableProperties.getSaltBuckets(tableProperties).ifPresent(value -> tableOptions.add(TableProperty.SALT_BUCKETS + "=" + value)); PhoenixTableProperties.getSplitOn(tableProperties).ifPresent(value -> tableOptions.add("SPLIT ON (" + value.replace('"', '\'') + ")")); PhoenixTableProperties.getDisableWal(tableProperties).ifPresent(value -> tableOptions.add(TableProperty.DISABLE_WAL + "=" + value)); PhoenixTableProperties.getDefaultColumnFamily(tableProperties).ifPresent(value -> tableOptions.add(TableProperty.DEFAULT_COLUMN_FAMILY + "=" + value)); PhoenixTableProperties.getBloomfilter(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.BLOOMFILTER + "='" + value + "'")); 
PhoenixTableProperties.getVersions(tableProperties).ifPresent(value -> tableOptions.add(HConstants.VERSIONS + "=" + value)); PhoenixTableProperties.getMinVersions(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.MIN_VERSIONS + "=" + value)); PhoenixTableProperties.getCompression(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.COMPRESSION + "='" + value + "'")); PhoenixTableProperties.getTimeToLive(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.TTL + "=" + value)); PhoenixTableProperties.getDataBlockEncoding(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.DATA_BLOCK_ENCODING + "='" + value + "'")); String sql = format( "CREATE %s TABLE %s (%s , CONSTRAINT PK PRIMARY KEY (%s)) %s", immutable, getEscapedTableName(schema, table), join(", ", columnList.build()), join(", ", pkNames.build()), join(", ", tableOptions.build())); execute(session, sql); return new PhoenixOutputTableHandle( schema, table, columnNames.build(), columnTypes.build(), Optional.empty(), rowkeyColumn); } catch (SQLException e) { if (e.getErrorCode() == SQLExceptionCode.TABLE_ALREADY_EXIST.getErrorCode()) { throw new TrinoException(ALREADY_EXISTS, "Phoenix table already exists", e); } throw new TrinoException(PHOENIX_METADATA_ERROR, "Error creating Phoenix table", e); } } @Override protected void renameTable(ConnectorSession session, String catalogName, String schemaName, String tableName, SchemaTableName newTable) { throw new TrinoException(NOT_SUPPORTED, "This connector does not support renaming tables"); } @Override public Map<String, Object> getTableProperties(ConnectorSession session, JdbcTableHandle handle) { ImmutableMap.Builder<String, Object> properties = ImmutableMap.builder(); try (Connection connection = connectionFactory.openConnection(session); Admin admin = connection.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) { String schemaName = toPhoenixSchemaName(handle.getSchemaName()); 
PTable table = getTable(connection, SchemaUtil.getTableName(schemaName, handle.getTableName())); boolean salted = table.getBucketNum() != null; StringJoiner joiner = new StringJoiner(","); List<PColumn> pkColumns = table.getPKColumns(); for (PColumn pkColumn : pkColumns.subList(salted ? 1 : 0, pkColumns.size())) { joiner.add(pkColumn.getName().getString()); } properties.put(PhoenixTableProperties.ROWKEYS, joiner.toString()); if (table.getBucketNum() != null) { properties.put(PhoenixTableProperties.SALT_BUCKETS, table.getBucketNum()); } if (table.isWALDisabled()) { properties.put(PhoenixTableProperties.DISABLE_WAL, table.isWALDisabled()); } if (table.isImmutableRows()) { properties.put(PhoenixTableProperties.IMMUTABLE_ROWS, table.isImmutableRows()); } String defaultFamilyName = QueryConstants.DEFAULT_COLUMN_FAMILY; if (table.getDefaultFamilyName() != null) { defaultFamilyName = table.getDefaultFamilyName().getString(); properties.put(PhoenixTableProperties.DEFAULT_COLUMN_FAMILY, defaultFamilyName); } HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(table.getPhysicalName().getBytes())); HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies(); for (HColumnDescriptor columnFamily : columnFamilies) { if (columnFamily.getNameAsString().equals(defaultFamilyName)) { if (columnFamily.getBloomFilterType() != BloomType.NONE) { properties.put(PhoenixTableProperties.BLOOMFILTER, columnFamily.getBloomFilterType()); } if (columnFamily.getMaxVersions() != 1) { properties.put(PhoenixTableProperties.VERSIONS, columnFamily.getMaxVersions()); } if (columnFamily.getMinVersions() > 0) { properties.put(PhoenixTableProperties.MIN_VERSIONS, columnFamily.getMinVersions()); } if (columnFamily.getCompression() != Compression.Algorithm.NONE) { properties.put(PhoenixTableProperties.COMPRESSION, columnFamily.getCompression()); } if (columnFamily.getTimeToLive() < FOREVER) { properties.put(PhoenixTableProperties.TTL, columnFamily.getTimeToLive()); } if 
(columnFamily.getDataBlockEncoding() != DataBlockEncoding.NONE) { properties.put(PhoenixTableProperties.DATA_BLOCK_ENCODING, columnFamily.getDataBlockEncoding()); } break; } } } catch (IOException | SQLException e) { throw new TrinoException(PHOENIX_METADATA_ERROR, "Couldn't get Phoenix table properties", e); } return properties.build(); } private static ColumnMapping arrayColumnMapping(ConnectorSession session, ArrayType arrayType, String elementJdbcTypeName) { return ColumnMapping.objectMapping( arrayType, arrayReadFunction(session, arrayType.getElementType()), arrayWriteFunction(session, arrayType.getElementType(), elementJdbcTypeName)); } private static ObjectReadFunction arrayReadFunction(ConnectorSession session, Type elementType) { return ObjectReadFunction.of(Block.class, (resultSet, columnIndex) -> { Object[] objectArray = toBoxedArray(resultSet.getArray(columnIndex).getArray()); return jdbcObjectArrayToBlock(session, elementType, objectArray); }); } private static ObjectWriteFunction arrayWriteFunction(ConnectorSession session, Type elementType, String elementJdbcTypeName) { return ObjectWriteFunction.of(Block.class, (statement, index, block) -> { Array jdbcArray = statement.getConnection().createArrayOf(elementJdbcTypeName, getJdbcObjectArray(session, elementType, block)); statement.setArray(index, jdbcArray); }); } private JdbcTypeHandle getArrayElementTypeHandle(JdbcTypeHandle arrayTypeHandle) { String arrayTypeName = arrayTypeHandle.getJdbcTypeName() .orElseThrow(() -> new TrinoException(PHOENIX_METADATA_ERROR, "Type name is missing for jdbc type: " + JDBCType.valueOf(arrayTypeHandle.getJdbcType()))); checkArgument(arrayTypeName.endsWith(" ARRAY"), "array type must end with ' ARRAY'"); arrayTypeName = arrayTypeName.substring(0, arrayTypeName.length() - " ARRAY".length()); verify(arrayTypeHandle.getCaseSensitivity().isEmpty(), "Case sensitivity not supported"); return new JdbcTypeHandle( PDataType.fromSqlTypeName(arrayTypeName).getSqlType(), 
Optional.of(arrayTypeName), arrayTypeHandle.getColumnSize(), arrayTypeHandle.getDecimalDigits(), arrayTypeHandle.getArrayDimensions(), Optional.empty()); } public QueryPlan getQueryPlan(PhoenixPreparedStatement inputQuery) { try { // Optimize the query plan so that we potentially use secondary indexes QueryPlan queryPlan = inputQuery.optimizeQuery(); // Initialize the query plan so it sets up the parallel scans queryPlan.iterator(MapReduceParallelScanGrouper.getInstance()); return queryPlan; } catch (SQLException e) { throw new TrinoException(PHOENIX_QUERY_ERROR, "Failed to get the Phoenix query plan", e); } } private static ResultSet getResultSet(PhoenixInputSplit split, QueryPlan queryPlan) { List<Scan> scans = split.getScans(); try { List<PeekingResultIterator> iterators = new ArrayList<>(scans.size()); StatementContext context = queryPlan.getContext(); // Clear the table region boundary cache to make sure long running jobs stay up to date PName physicalTableName = queryPlan.getTableRef().getTable().getPhysicalName(); PhoenixConnection phoenixConnection = context.getConnection(); ConnectionQueryServices services = phoenixConnection.getQueryServices(); services.clearTableRegionCache(TableName.valueOf(physicalTableName.getBytes())); for (Scan scan : scans) { scan = new Scan(scan); // For MR, skip the region boundary check exception if we encounter a split. ref: PHOENIX-2599 scan.setAttribute(SKIP_REGION_BOUNDARY_CHECK, Bytes.toBytes(true)); ScanMetricsHolder scanMetricsHolder = ScanMetricsHolder.getInstance( context.getReadMetricsQueue(), physicalTableName.getString(), scan, phoenixConnection.getLogLevel()); TableResultIterator tableResultIterator = new TableResultIterator( phoenixConnection.getMutationState(), scan, scanMetricsHolder, services.getRenewLeaseThresholdMilliSeconds(), queryPlan, MapReduceParallelScanGrouper.getInstance()); iterators.add(LookAheadResultIterator.wrap(tableResultIterator)); } ResultIterator iterator = queryPlan.useRoundRobinIterator() ? 
RoundRobinResultIterator.newIterator(iterators, queryPlan) : ConcatResultIterator.newIterator(iterators); if (context.getSequenceManager().getSequenceCount() > 0) { iterator = new SequenceResultIterator(iterator, context.getSequenceManager()); } // Clone the row projector as it's not thread safe and would be used simultaneously by // multiple threads otherwise. return new PhoenixResultSet(iterator, queryPlan.getProjector().cloneIfNecessary(), context); } catch (SQLException e) { throw new TrinoException(PHOENIX_QUERY_ERROR, "Error while setting up Phoenix ResultSet", e); } catch (IOException e) { throw new TrinoException(PhoenixErrorCode.PHOENIX_INTERNAL_ERROR, "Error while copying scan", e); } } }
/* * Copyright (C) 2017 UrbanThings. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * UrbanThings is a trading name of FatAttitude Limited */ package io.urbanthings.api.rx.transit; import java.util.Date; import java.util.List; import io.urbanthings.api.rx.BaseRxApi; import io.urbanthings.api.rx.transit.internal.RetrofitRxTransitApi; import io.urbanthings.api.transit.model.ApiResponse; import io.urbanthings.api.transit.model.DirectionsRequest; import io.urbanthings.api.transit.model.DirectionsResponse; import io.urbanthings.api.transit.model.PlacePointList; import io.urbanthings.api.transit.model.PlacePointType; import io.urbanthings.api.transit.model.ResourceStatus; import io.urbanthings.api.transit.model.StopBoardResponse; import io.urbanthings.api.transit.model.TransitDetailedRouteInfo; import io.urbanthings.api.transit.model.TransitStop; import io.urbanthings.api.transit.model.TransitStopRTIResponse; import io.urbanthings.api.transit.model.TransitStopScheduledCalls; import io.urbanthings.api.transit.model.TransitTrip; import io.urbanthings.api.transit.model.TransitTripCalendarGroup; import io.urbanthings.api.transit.model.VehiclePassingType; import io.urbanthings.api.transit.model.VehicleType; import okhttp3.logging.HttpLoggingInterceptor; import retrofit2.adapter.rxjava.Result; import rx.Single; import rx.functions.Func1; public class RxTransitApi extends BaseRxApi { private static final String VERSION = "2.0"; private RetrofitRxTransitApi api; private String key; protected 
RxTransitApi(RetrofitRxTransitApi api, String key, HttpLoggingInterceptor loggingInterceptor) { super(loggingInterceptor); this.api = api; this.key = key; } /** * Get a List of TransitStops for a given bounding region. The list can be filtered by matching * against the name of the transit stop, as well as 0 or more VehicleTypes * * Max size of the bounding box is 20km x 20km * @param minLat * @param maxLat * @param minLng * @param maxLng * @param stopModes List of VehicleTypes * @return */ public Single<List<TransitStop>> getStopsInRect(double minLat, double maxLat, double minLng, double maxLng, List<VehicleType> stopModes) { return api.getStops(minLat, maxLat, minLng, maxLng, vehicleTypeListToCommaSeperatedList(stopModes), null, null, null, VERSION, key).map(new Func1<Result<ApiResponse<List<TransitStop>>>, List<TransitStop>>() { @Override public List<TransitStop> call(Result<ApiResponse<List<TransitStop>>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data; } }); } /** * Get a List of TransitStops for a given radius. The list can be filtered by matching * against the name of the transit stop, as well as 0 or more VehicleTypes * * @param centerLat * @param centerLng * @param radius * @param stopModes List of VehicleTypes * @return */ public Single<List<TransitStop>> getStopsInRadius(double centerLat, double centerLng, double radius, List<VehicleType> stopModes) { return api.getStops(centerLat, centerLng, radius, vehicleTypeListToCommaSeperatedList(stopModes), null, null, null, VERSION, key).map(new Func1<Result<ApiResponse<List<TransitStop>>>, List<TransitStop>>() { @Override public List<TransitStop> call(Result<ApiResponse<List<TransitStop>>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data; } }); } /** * Get a list of PlacePoints for a given bounding region. 
The list can be filtered by matching * against the name of the PlacePoint, as well as 0 or more PlacePointTypes * * @param minLat * @param maxLat * @param minLng * @param maxLng * @param placePointTypes * @param maxResults * @return */ public Single<PlacePointList> getPlacePointsInRect(double minLat, double maxLat, double minLng, double maxLng, List<PlacePointType> placePointTypes, Integer maxResults) { return api.getPlacePoints(minLat, maxLat, minLng, maxLng, placePointTypeListToCommaSeperatedList(placePointTypes), maxResults, VERSION, key).map(new Func1<Result<ApiResponse<PlacePointList>>, PlacePointList>() { @Override public PlacePointList call(Result<ApiResponse<PlacePointList>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data; } }); } /** * Get a list of PlacePoints for a given radius. The list can be filtered by matching * against the name of the PlacePoint, as well as 0 or more PlacePointTypes * * @param lat * @param lng * @param radius * @param placePointTypes * @param maxResults * @return */ public Single<PlacePointList> getPlacePointsInRadius(double lat, double lng, int radius, List<PlacePointType> placePointTypes, Integer maxResults) { return api.getPlacePoints(lat, lng, radius, placePointTypeListToCommaSeperatedList(placePointTypes), maxResults, VERSION, key).map(new Func1<Result<ApiResponse<PlacePointList>>, PlacePointList>() { @Override public PlacePointList call(Result<ApiResponse<PlacePointList>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data; } }); } /** * Get a list of Routes and associated info for a given transport agency matching the supplied query * * @param lineName * @param exactMatch * @param agencyId * @param agencyRegion * @return */ public Single<List<TransitDetailedRouteInfo>> searchRoutesByAgency(String lineName, Boolean exactMatch, String agencyId, String agencyRegion) { return api.searchRoutes(lineName, exactMatch, null, 
agencyId, agencyRegion, null, null, VERSION, key).map(new Func1<Result<ApiResponse<List<TransitDetailedRouteInfo>>>, List<TransitDetailedRouteInfo>>() { @Override public List<TransitDetailedRouteInfo> call(Result<ApiResponse<List<TransitDetailedRouteInfo>>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data; } }); } /** * Get a list of Routes and associated info near the supplied location matching the supplied query * @param lineName * @param exactMatch * @param lat * @param lng * @return */ public Single<List<TransitDetailedRouteInfo>> searchRoutesByLocation(String lineName, Boolean exactMatch, Double lat, Double lng) { return api.searchRoutes(lineName, exactMatch, null, null, null, lat, lng, VERSION, key).map(new Func1<Result<ApiResponse<List<TransitDetailedRouteInfo>>>, List<TransitDetailedRouteInfo>>() { @Override public List<TransitDetailedRouteInfo> call(Result<ApiResponse<List<TransitDetailedRouteInfo>>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data; } }); } /** * Get a list of Routes and associated info for a given transport agency * * @param agencyId * @return */ public Single<List<TransitDetailedRouteInfo>> getRoutesByAgency(String agencyId) { return api.getRoutes(agencyId, null, VERSION, key).map(new Func1<Result<ApiResponse<List<TransitDetailedRouteInfo>>>, List<TransitDetailedRouteInfo>>() { @Override public List<TransitDetailedRouteInfo> call(Result<ApiResponse<List<TransitDetailedRouteInfo>>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data; } }); } /** * Get a list of Routes and associated info for a given Stop * @param stopId * @return */ public Single<List<TransitDetailedRouteInfo>> getRoutesForStop(String stopId) { return api.getRoutesForStop(stopId, VERSION, key).map(new Func1<Result<ApiResponse<List<TransitDetailedRouteInfo>>>, List<TransitDetailedRouteInfo>>() { @Override public 
List<TransitDetailedRouteInfo> call(Result<ApiResponse<List<TransitDetailedRouteInfo>>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data; } }); } /** * Get a Trip for a given TripId * @param tripId * @param originStopId * @param destinationStopId * @param includePolyLines * @param includeStopCoordinates * @return */ public Single<TransitTrip> getTrip(String tripId, String originStopId, String destinationStopId, Boolean includePolyLines, Boolean includeStopCoordinates) { return api.getTrips(null, tripId, originStopId, destinationStopId, includePolyLines, includeStopCoordinates, VERSION, key).map(new Func1<Result<ApiResponse<List<TransitTrip>>>, TransitTrip>() { @Override public TransitTrip call(Result<ApiResponse<List<TransitTrip>>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data.size() > 0 ? apiResponseResult.response().body().data.get(0) : new TransitTrip(); } }); } /** * Get a List of Trips for a given routeId * @param routeId * @param originStopId * @param destinationStopId * @param includePolyLines * @param includeStopCoordinates * @return */ public Single<List<TransitTrip>> getTripsForRoute(String routeId, String originStopId, String destinationStopId, Boolean includePolyLines, Boolean includeStopCoordinates) { return api.getTrips(routeId, null, originStopId, destinationStopId, includePolyLines, includeStopCoordinates, VERSION, key).map(new Func1<Result<ApiResponse<List<TransitTrip>>>, List<TransitTrip>>() { @Override public List<TransitTrip> call(Result<ApiResponse<List<TransitTrip>>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data; } }); } /** * * @param routeId * @param includePolyLines * @param includeStopCoordinates * @return */ public Single<List<TransitTripCalendarGroup>> getTripsByCalendarGroups(String routeId, Boolean includePolyLines, Boolean includeStopCoordinates) { return 
api.getTripsByCalendar(routeId, includePolyLines, includeStopCoordinates, VERSION, key).map(new Func1<Result<ApiResponse<List<TransitTripCalendarGroup>>>, List<TransitTripCalendarGroup>>() { @Override public List<TransitTripCalendarGroup> call(Result<ApiResponse<List<TransitTripCalendarGroup>>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data; } }); } /** * Get a List of TransitStopScheduledCalls for a given stopId, with an optional query time and look ahead * @param stopId * @param queryTime * @param lookAheadMinutes * @return */ public Single<TransitStopScheduledCalls> getStopCalls(String stopId, Date queryTime, Integer lookAheadMinutes) { return api.getStopCalls(stopId, queryTime, lookAheadMinutes, VERSION, key).map(new Func1<Result<ApiResponse<TransitStopScheduledCalls>>, TransitStopScheduledCalls>() { @Override public TransitStopScheduledCalls call(Result<ApiResponse<TransitStopScheduledCalls>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data; } }); } /** * Get the resource status of a stopId such as a car park or bike dock * @param stopId * @return */ public Single<List<ResourceStatus>> getResourceStatus(String stopId) { return api.getResourceStatus(stopId, VERSION, key).map(new Func1<Result<ApiResponse<List<ResourceStatus>>>, List<ResourceStatus>>() { @Override public List<ResourceStatus> call(Result<ApiResponse<List<ResourceStatus>>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data; } }); } /** * Get the real time arrivals and departures information for a stop, formatted for data processing purposes. 
* @param stopId * @param maxItems * @param lookAheadMinutes * @return */ public Single<TransitStopRTIResponse> getStopReport(String stopId, Integer maxItems, Integer lookAheadMinutes) { return api.getStopReport(stopId, maxItems, lookAheadMinutes, VERSION, key).map(new Func1<Result<ApiResponse<TransitStopRTIResponse>>, TransitStopRTIResponse>() { @Override public TransitStopRTIResponse call(Result<ApiResponse<TransitStopRTIResponse>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data; } }); } /** * Get the real time arrivals and departures information for a stop, formatted for display purposes. * @param stopId * @param maxItems * @param vehiclePassingType * @param use24clock * @return */ public Single<StopBoardResponse> getStopBoard(String stopId, Integer maxItems, VehiclePassingType vehiclePassingType, Boolean use24clock) { return api.getStopboard(stopId, maxItems, vehiclePassingType, use24clock, VERSION, key).map(new Func1<Result<ApiResponse<StopBoardResponse>>, StopBoardResponse>() { @Override public StopBoardResponse call(Result<ApiResponse<StopBoardResponse>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data; } }); } /** * Get Journey options for the provided DirectionsRequest object * @param request * @return */ public Single<DirectionsResponse> getDirections(DirectionsRequest request) { return api.getDirections(request, VERSION, key).map(new Func1<Result<ApiResponse<DirectionsResponse>>, DirectionsResponse>() { @Override public DirectionsResponse call(Result<ApiResponse<DirectionsResponse>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data; } }); } /** * Get details of a route for a given routeId * * @param routeId * @param routeToken * @param lineName * @param originStopId * @param aimedDepartureTime * @param expectedDepartureTime * @return */ public Single<DirectionsResponse> getRoute(String routeId, 
String routeToken, String lineName, String originStopId, Date aimedDepartureTime, Date expectedDepartureTime) { return api.getRoute(routeId, routeToken, lineName, originStopId, aimedDepartureTime, expectedDepartureTime, VERSION, key).map(new Func1<Result<ApiResponse<DirectionsResponse>>, DirectionsResponse>() { @Override public DirectionsResponse call(Result<ApiResponse<DirectionsResponse>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data; } }); } /** * Get a list of PlacePoints based on the query and supplied location * @param name * @param lat * @param lng * @param maxResultsPerType * @return */ public Single<PlacePointList> searchPlacePoints(String name, double lat, double lng, Integer maxResultsPerType) { return api.searchPlacePoints(name, lat, lng, null, maxResultsPerType, VERSION, key).map(new Func1<Result<ApiResponse<PlacePointList>>, PlacePointList>() { @Override public PlacePointList call(Result<ApiResponse<PlacePointList>> apiResponseResult) { handleResult(apiResponseResult); return apiResponseResult.response().body().data; } }); } }
package org.math.plot.plotObjects; import static java.lang.Math.*; /** * BSD License * * @author Yann RICHET */ public class Base { public final static int LINEAR = 0; public final static int LOG = 1; public double[][] baseCoords; protected double[] precisionUnit; public double[] roundXmin; public double[] roundXmax; protected double[] trueXmin; protected double[] trueXmax; public int dimension; public int[] axesScales; public Base(double[] Xmi, double[] Xma, int[] scales) { trueXmin = Xmi; trueXmax = Xma; dimension = trueXmin.length; axesScales = scales; init(trueXmin.length); setRoundBounds(trueXmin, trueXmax); resetCoords(); } private void init(int d) { precisionUnit = new double[d]; roundXmin = new double[d]; roundXmax = new double[d]; trueXmin = new double[d]; trueXmax = new double[d]; } private void resetCoords() { baseCoords = new double[dimension + 1][]; for (int i = 0; i < baseCoords.length; i++) { baseCoords[i] = (double[]) (roundXmin.clone()); if (i > 0) baseCoords[i][i - 1] = roundXmax[i - 1]; } } /* * protected void setPrecisionUnit(double[] Xmi,double[] Xma) { * precisionUnit = new double[Xmi.length]; for (int i = 0; i < * precisionUnit.length; i++) { setPrecisionUnit(Xmi[i],Xma[i], i); } } */ private void setPrecisionUnit(int i, double Xmi, double Xma) { if (Xma - Xmi > 0) { precisionUnit[i] = pow(10, floor(log(Xma - Xmi) / log(10))); } else { precisionUnit[i] = 1; } // System.out.println("precisionUnit["+i+"] = "+precisionUnit[i]); } public void setAxesScales(int[] scales) { axesScales = scales; setRoundBounds(trueXmin, trueXmax); resetCoords(); } public void setAxesScales(int i, int scale) { axesScales[i] = scale; setRoundBounds(trueXmin, trueXmax); resetCoords(); } public double[][] getCoords() { return baseCoords; } /* * public int getDimension() { return dimension; } */ public int[] getAxesScales() { return axesScales; } public int getAxeScale(int i) { return axesScales[i]; } public double[] getMinBounds() { return roundXmin; } public double[] 
getMaxBounds() { return roundXmax; } public double[] getPrecisionUnit() { return precisionUnit; } // /////////////////////////////////////////// // ////// bounds methods ///////////////////// // /////////////////////////////////////////// private void setBounds(int i, double Xmi, double Xma) { if ((Xmi <= 0) && (axesScales[i] == LOG)) { throw new IllegalArgumentException("Error while bounding dimension " + (i + 1) + " : bounds [" + Xmi + "," + Xma + "] are incompatible with Logarithm scale."); } if (Xmi == Xma) { Xmi = Xma - 1; } if (Xmi > Xma) { throw new IllegalArgumentException("Error while bounding dimension " + (i + 1) + " : min " + Xmi + " must be < to max " + Xma); } roundXmin[i] = Xmi; roundXmax[i] = Xma; resetCoords(); } /* * private void setBounds(double[] Xmi, double[] Xma) { for (int i = 0; i < * Xmi.length; i++) { setBounds(i, Xmi[i], Xma[i]); } } */ public void setFixedBounds(int i, double Xmi, double Xma) { setPrecisionUnit(i, Xmi, Xma); setBounds(i, Xmi, Xma); } public void setFixedBounds(double[] Xmi, double[] Xma) { for (int i = 0; i < Xmi.length; i++) { setFixedBounds(i, Xmi[i], Xma[i]); } } public void roundBounds(int i) { setPrecisionUnit(i, trueXmin[i], trueXmax[i]); if (axesScales[i] == LOG) { setBounds(i, pow(10, floor(log(trueXmin[i]) / log(10))), pow(10, ceil(log(trueXmax[i]) / log(10)))); } else if (axesScales[i] == LINEAR) { setBounds(i, precisionUnit[i] * (floor(trueXmin[i] / precisionUnit[i])), precisionUnit[i] * (ceil(trueXmax[i] / precisionUnit[i]))); } /* * System.out.println("precisionUnit[" + i + "]=" + precisionUnit[i]); * System.out.println("trueXmin["+i+"]="+trueXmin[i]); * System.out.println("trueXmax["+i+"]="+trueXmax[i]); * System.out.println("roundXmin["+i+"]="+roundXmin[i]); * System.out.println("roundXmax["+i+"]="+roundXmax[i]); * * System.out.println("Xmi=" + trueXmin[i] + " Xma=" + trueXmax[i]); * System.out.println( " -> precisionUnit[i] * (Math.floor(Xmi / * precisionUnit[i]))=" + precisionUnit[i] * 
(Math.floor(trueXmin[i] / * precisionUnit[i]))); System.out.println( " -> precisionUnit[i] * * (Math.ceil(Xma / precisionUnit[i]))=" + precisionUnit[i] * * (ceil(trueXmax[i] / precisionUnit[i]))); */ } public void setRoundBounds(int i, double Xmi, double Xma) { trueXmin[i] = Xmi; trueXmax[i] = Xma; roundBounds(i); } public void setRoundBounds(double[] Xmi, double[] Xma) { for (int i = 0; i < Xmi.length; i++) { trueXmin[i] = Xmi[i]; trueXmax[i] = Xma[i]; roundBounds(i); } } public void includeInBounds(int dim, double XY) { for (int i = 0; i < roundXmin.length; i++) { if (i == dim) if (XY < trueXmin[i]) trueXmin[i] = XY; } for (int i = 0; i < roundXmax.length; i++) { if (i == dim) if (XY > trueXmax[i]) trueXmax[i] = XY; } roundBounds(dim); } public void includeInBounds(double[] XY) { for (int i = 0; i < roundXmin.length; i++) { if (XY[i] < trueXmin[i]) trueXmin[i] = XY[i]; } for (int i = 0; i < roundXmax.length; i++) { if (XY[i] > trueXmax[i]) trueXmax[i] = XY[i]; } setRoundBounds(trueXmin, trueXmax); } // /////////////////////////////////////////// // ////// other public methods /////////////// // /////////////////////////////////////////// public boolean authorizedLogScale(int i) { // System.out.println("Xmin[" + i + "] = " + roundXmin[i]); if (roundXmin[i] > 0) { return true; } else { return false; } } public String toString() { StringBuffer s = new StringBuffer(); for (int i = 0; i < baseCoords.length; i++) { s.append("["); for (int j = 0; j < baseCoords[i].length; j++) s.append(baseCoords[i][j] + ","); s.deleteCharAt(s.length() - 1); s.append("]"); } return s.toString(); } }
/*
 * oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
 *
 * Copyright (c) 2014, Gluu
 */

package org.xdi.oxauth.model.crypto;

import org.apache.commons.codec.binary.Base64;
import org.apache.log4j.Logger;
import org.bouncycastle.asn1.ASN1EncodableVector;
import org.bouncycastle.asn1.ASN1ObjectIdentifier;
import org.bouncycastle.asn1.DERSequence;
import org.bouncycastle.asn1.x500.X500Name;
import org.bouncycastle.asn1.x509.KeyPurposeId;
import org.bouncycastle.cert.CertIOException;
import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter;
import org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder;
import org.bouncycastle.operator.ContentSigner;
import org.bouncycastle.operator.OperatorCreationException;
import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.xdi.oxauth.model.crypto.signature.SignatureAlgorithm;
import org.xdi.oxauth.model.crypto.signature.SignatureAlgorithmFamily;
import org.xdi.oxauth.model.jwk.Use;
import org.xdi.oxauth.model.util.Base64Util;
import org.xdi.oxauth.model.util.Util;

import javax.crypto.Mac;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.math.BigInteger;
import java.security.*;
import java.security.Key;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.security.interfaces.ECPublicKey;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.ECGenParameterSpec;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.UUID;

import static org.xdi.oxauth.model.jwk.JWKParameter.*;

/**
 * Crypto provider backed by a JKS {@link KeyStore} on disk. Keys are generated
 * with the BouncyCastle ("BC") provider, wrapped in a self-signed X.509 v3
 * certificate and persisted to {@code keyStoreFile}; signing/verification is
 * dispatched by {@link SignatureAlgorithmFamily} (HMAC via shared secret,
 * RSA/EC via key-store entries).
 *
 * @author Javier Rojas Blum
 * @author Yuriy Movchan
 * @version August 28, 2017
 */
public class OxAuthCryptoProvider extends AbstractCryptoProvider {

    private static final Logger LOG = Logger.getLogger(OxAuthCryptoProvider.class);

    private KeyStore keyStore;
    private String keyStoreFile;
    private String keyStoreSecret;
    private String dnName;

    public OxAuthCryptoProvider() throws Exception {
        this(null, null, null);
    }

    /**
     * Opens (creating on first use) the JKS key store at {@code keyStoreFile}.
     * If either the file path or the secret is empty the provider is left
     * uninitialized, mirroring the original lenient behavior. dnName is only
     * used later as the issuer/subject of generated certificates, so it is not
     * required here.
     */
    public OxAuthCryptoProvider(String keyStoreFile, String keyStoreSecret, String dnName) throws Exception {
        if (!Util.isNullOrEmpty(keyStoreFile) && !Util.isNullOrEmpty(keyStoreSecret) /* && !Util.isNullOrEmpty(dnName) */) {
            this.keyStoreFile = keyStoreFile;
            this.keyStoreSecret = keyStoreSecret;
            this.dnName = dnName;

            keyStore = KeyStore.getInstance("JKS");
            try {
                File f = new File(keyStoreFile);
                if (!f.exists()) {
                    // First run: persist an empty store so the subsequent load succeeds.
                    keyStore.load(null, keyStoreSecret.toCharArray());
                    // FIX: close the output stream even if store() throws (was closed unconditionally but not on error).
                    try (FileOutputStream fos = new FileOutputStream(keyStoreFile)) {
                        keyStore.store(fos, keyStoreSecret.toCharArray());
                    }
                }
                // FIX: the input stream was never closed in the original (resource leak).
                try (InputStream is = new FileInputStream(keyStoreFile)) {
                    keyStore.load(is, keyStoreSecret.toCharArray());
                }
            } catch (Exception e) {
                // Preserves original behavior: initialization failures are logged, not propagated.
                LOG.error(e.getMessage(), e);
            }
        }
    }

    /**
     * Generates a new RSA (2048-bit) or EC key pair for the given algorithm,
     * stores it under a random UUID alias wrapped in a self-signed certificate,
     * persists the key store, and returns the public part as a JWK-shaped
     * {@link JSONObject}.
     *
     * @param signatureAlgorithm algorithm family/curve to generate for; must not be null
     * @param expirationTime     certificate "not after" / JWK "exp" in epoch millis
     * @throws RuntimeException if the algorithm is null or unsupported
     */
    @Override
    public JSONObject generateKey(SignatureAlgorithm signatureAlgorithm, Long expirationTime) throws Exception {
        KeyPairGenerator keyGen = null;

        if (signatureAlgorithm == null) {
            throw new RuntimeException("The signature algorithm parameter cannot be null");
        } else if (SignatureAlgorithmFamily.RSA.equals(signatureAlgorithm.getFamily())) {
            keyGen = KeyPairGenerator.getInstance(signatureAlgorithm.getFamily().toString(), "BC");
            keyGen.initialize(2048, new SecureRandom());
        } else if (SignatureAlgorithmFamily.EC.equals(signatureAlgorithm.getFamily())) {
            ECGenParameterSpec eccgen = new ECGenParameterSpec(signatureAlgorithm.getCurve().getAlias());
            keyGen = KeyPairGenerator.getInstance(signatureAlgorithm.getFamily().toString(), "BC");
            keyGen.initialize(eccgen, new SecureRandom());
        } else {
            throw new RuntimeException("The provided signature algorithm parameter is not supported");
        }

        // Generate the key
        KeyPair keyPair = keyGen.generateKeyPair();
        java.security.PrivateKey pk = keyPair.getPrivate();

        // Java API requires a certificate chain
        X509Certificate cert = generateV3Certificate(keyPair, dnName, signatureAlgorithm.getAlgorithm(), expirationTime);
        X509Certificate[] chain = new X509Certificate[1];
        chain[0] = cert;

        String alias = UUID.randomUUID().toString();
        keyStore.setKeyEntry(alias, pk, keyStoreSecret.toCharArray(), chain);

        // FIX: the output stream was never closed in the original (resource leak).
        try (FileOutputStream stream = new FileOutputStream(keyStoreFile)) {
            keyStore.store(stream, keyStoreSecret.toCharArray());
        }

        PublicKey publicKey = keyPair.getPublic();

        JSONObject jsonObject = new JSONObject();
        jsonObject.put(KEY_TYPE, signatureAlgorithm.getFamily());
        jsonObject.put(KEY_ID, alias);
        jsonObject.put(KEY_USE, Use.SIGNATURE);
        jsonObject.put(ALGORITHM, signatureAlgorithm.getName());
        jsonObject.put(EXPIRATION_TIME, expirationTime);
        if (publicKey instanceof RSAPublicKey) {
            RSAPublicKey rsaPublicKey = (RSAPublicKey) publicKey;
            jsonObject.put(MODULUS, Base64Util.base64urlencodeUnsignedBigInt(rsaPublicKey.getModulus()));
            jsonObject.put(EXPONENT, Base64Util.base64urlencodeUnsignedBigInt(rsaPublicKey.getPublicExponent()));
        } else if (publicKey instanceof ECPublicKey) {
            ECPublicKey ecPublicKey = (ECPublicKey) publicKey;
            jsonObject.put(CURVE, signatureAlgorithm.getCurve());
            jsonObject.put(X, Base64Util.base64urlencodeUnsignedBigInt(ecPublicKey.getW().getAffineX()));
            jsonObject.put(Y, Base64Util.base64urlencodeUnsignedBigInt(ecPublicKey.getW().getAffineY()));
        }
        JSONArray x5c = new JSONArray();
        x5c.put(Base64.encodeBase64String(cert.getEncoded()));
        jsonObject.put(CERTIFICATE_CHAIN, x5c);

        return jsonObject;
    }

    /**
     * Signs {@code signingInput} and returns the base64url-encoded signature.
     * NONE returns an empty string; HMAC uses {@code sharedSecret}; RSA/EC use
     * the private key stored under {@code alias}.
     *
     * NOTE(review): signingInput.getBytes() uses the platform default charset
     * for the RSA/EC branch while the HMAC branch pins UTF-8 — preserved as-is
     * to avoid changing signatures produced on non-UTF-8 platforms.
     */
    @Override
    public String sign(String signingInput, String alias, String sharedSecret, SignatureAlgorithm signatureAlgorithm) throws Exception {
        if (signatureAlgorithm == SignatureAlgorithm.NONE) {
            return "";
        } else if (SignatureAlgorithmFamily.HMAC.equals(signatureAlgorithm.getFamily())) {
            SecretKey secretKey = new SecretKeySpec(sharedSecret.getBytes(Util.UTF8_STRING_ENCODING), signatureAlgorithm.getAlgorithm());
            Mac mac = Mac.getInstance(signatureAlgorithm.getAlgorithm());
            mac.init(secretKey);
            byte[] sig = mac.doFinal(signingInput.getBytes());
            return Base64Util.base64urlencode(sig);
        } else { // EC or RSA
            PrivateKey privateKey = getPrivateKey(alias);

            Signature signature = Signature.getInstance(signatureAlgorithm.getAlgorithm(), "BC");
            //Signature signature = Signature.getInstance(signatureAlgorithm.getAlgorithm());
            signature.initSign(privateKey);
            signature.update(signingInput.getBytes());

            return Base64Util.base64urlencode(signature.sign());
        }
    }

    /**
     * Verifies {@code encodedSignature} over {@code signingInput}. NONE is
     * valid only for an empty signature; HMAC re-computes and compares; RSA/EC
     * resolve the public key from the key store ({@code jwks == null}) or from
     * the supplied JWKS document.
     *
     * @return true when the signature verifies; false on any failure (errors are logged)
     */
    @Override
    public boolean verifySignature(String signingInput, String encodedSignature, String alias, JSONObject jwks, String sharedSecret, SignatureAlgorithm signatureAlgorithm) throws Exception {
        boolean verified = false;

        if (signatureAlgorithm == SignatureAlgorithm.NONE) {
            return Util.isNullOrEmpty(encodedSignature);
        } else if (SignatureAlgorithmFamily.HMAC.equals(signatureAlgorithm.getFamily())) {
            String expectedSignature = sign(signingInput, null, sharedSecret, signatureAlgorithm);
            // FIX: constant-time comparison — String.equals leaks a timing side channel on MACs.
            return MessageDigest.isEqual(expectedSignature.getBytes(Util.UTF8_STRING_ENCODING),
                    encodedSignature.getBytes(Util.UTF8_STRING_ENCODING));
        } else { // EC or RSA
            PublicKey publicKey = null;

            try {
                if (jwks == null) {
                    publicKey = getPublicKey(alias);
                } else {
                    publicKey = getPublicKey(alias, jwks);
                }
                if (publicKey == null) {
                    return false;
                }

                byte[] signature = Base64Util.base64urldecode(encodedSignature);

                Signature verifier = Signature.getInstance(signatureAlgorithm.getAlgorithm(), "BC");
                //Signature verifier = Signature.getInstance(signatureAlgorithm.getAlgorithm());
                verifier.initVerify(publicKey);
                verifier.update(signingInput.getBytes());
                verified = verifier.verify(signature);
            } catch (NoSuchAlgorithmException e) {
                LOG.error(e.getMessage(), e);
                verified = false;
            } catch (SignatureException e) {
                LOG.error(e.getMessage(), e);
                verified = false;
            } catch (InvalidKeyException e) {
                LOG.error(e.getMessage(), e);
                verified = false;
            } catch (Exception e) {
                LOG.error(e.getMessage(), e);
                verified = false;
            }
        }

        return verified;
    }

    /**
     * Reads a value either from the JWKS root or, as a fallback, from its
     * nested PUBLIC_KEY object.
     */
    private String getJWKSValue(JSONObject jwks, String node) throws JSONException {
        try {
            return jwks.getString(node);
        } catch (Exception ex) {
            JSONObject publicKey = jwks.getJSONObject(PUBLIC_KEY);
            return publicKey.getString(node);
        }
    }

    /**
     * Removes the key entry under {@code alias} and persists the key store.
     *
     * @return always true (deleteEntry of a missing alias is a no-op in JKS)
     */
    @Override
    public boolean deleteKey(String alias) throws Exception {
        keyStore.deleteEntry(alias);
        // FIX: the output stream was never closed in the original (resource leak).
        try (FileOutputStream stream = new FileOutputStream(keyStoreFile)) {
            keyStore.store(stream, keyStoreSecret.toCharArray());
        }
        return true;
    }

    /**
     * Returns the public key of the certificate stored under {@code alias},
     * or null when the alias is empty, unknown, or the store is unreadable.
     */
    public PublicKey getPublicKey(String alias) {
        PublicKey publicKey = null;
        try {
            if (Util.isNullOrEmpty(alias)) {
                return null;
            }
            java.security.cert.Certificate certificate = keyStore.getCertificate(alias);
            if (certificate == null) {
                return null;
            }
            publicKey = certificate.getPublicKey();
        } catch (KeyStoreException e) {
            // FIX: log through the class logger instead of printStackTrace().
            LOG.error(e.getMessage(), e);
        }
        return publicKey;
    }

    /**
     * Returns the private key stored under {@code alias}, or null when the
     * alias is empty or absent.
     */
    public PrivateKey getPrivateKey(String alias) throws UnrecoverableKeyException, NoSuchAlgorithmException, KeyStoreException {
        if (Util.isNullOrEmpty(alias)) {
            return null;
        }
        Key key = keyStore.getKey(alias, keyStoreSecret.toCharArray());
        if (key == null) {
            return null;
        }
        PrivateKey privateKey = (PrivateKey) key;
        return privateKey;
    }

    /**
     * Builds a self-signed X.509 v3 certificate for {@code keyPair}, valid
     * from 10 seconds ago (clock-skew allowance) until {@code expirationTime},
     * with serverAuth/clientAuth/anyExtendedKeyUsage extended key usages.
     *
     * @param issuer             DN used as both issuer and subject (self-signed)
     * @param signatureAlgorithm JCA signature algorithm name for the signer
     * @param expirationTime     "not after" instant in epoch millis
     */
    public X509Certificate generateV3Certificate(KeyPair keyPair, String issuer, String signatureAlgorithm, Long expirationTime) throws CertIOException, OperatorCreationException, CertificateException {
        PrivateKey privateKey = keyPair.getPrivate();
        PublicKey publicKey = keyPair.getPublic();

        // Signers name
        X500Name issuerName = new X500Name(issuer);

        // Subjects name - the same as we are self signed.
        X500Name subjectName = new X500Name(issuer);

        // Serial
        BigInteger serial = new BigInteger(256, new SecureRandom());

        // Not before
        Date notBefore = new Date(System.currentTimeMillis() - 10000);
        Date notAfter = new Date(expirationTime);

        // Create the certificate - version 3
        JcaX509v3CertificateBuilder builder = new JcaX509v3CertificateBuilder(issuerName, serial, notBefore, notAfter, subjectName, publicKey);

        ASN1EncodableVector purposes = new ASN1EncodableVector();
        purposes.add(KeyPurposeId.id_kp_serverAuth);
        purposes.add(KeyPurposeId.id_kp_clientAuth);
        purposes.add(KeyPurposeId.anyExtendedKeyUsage);

        // 2.5.29.37 = id-ce-extKeyUsage
        ASN1ObjectIdentifier extendedKeyUsage = new ASN1ObjectIdentifier("2.5.29.37").intern();
        builder.addExtension(extendedKeyUsage, false, new DERSequence(purposes));

        ContentSigner signer = new JcaContentSignerBuilder(signatureAlgorithm).setProvider("BC").build(privateKey);
        X509CertificateHolder holder = builder.build(signer);
        X509Certificate cert = new JcaX509CertificateConverter().setProvider("BC").getCertificate(holder);

        return cert;
    }

    /** Lists all aliases currently present in the key store. */
    public List<String> getKeyAliases() throws KeyStoreException {
        return Collections.list(this.keyStore.aliases());
    }

    /**
     * Resolves the {@link SignatureAlgorithm} of the certificate stored under
     * {@code alias} by matching its signature-algorithm name, or null when
     * the alias has no chain or no known algorithm matches.
     */
    public SignatureAlgorithm getSignatureAlgorithm(String alias) throws UnrecoverableKeyException, NoSuchAlgorithmException, KeyStoreException {
        Certificate[] chain = keyStore.getCertificateChain(alias);
        if ((chain == null) || chain.length == 0) {
            return null;
        }

        X509Certificate cert = (X509Certificate) chain[0];

        String sighAlgName = cert.getSigAlgName();

        for (SignatureAlgorithm sa : SignatureAlgorithm.values()) {
            if (sighAlgName.equalsIgnoreCase(sa.getAlgorithm())) {
                return sa;
            }
        }
        return null;
    }
}
/*
 * Copyright Washington University in St. Louis
 * Copyright SemanticBits
 * Copyright Persistent Systems
 * Copyright Krishagni
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/catissue-migration-tool/LICENSE.txt for details.
 */

package edu.wustl.bulkoperator.bizlogic;

import java.io.CharArrayWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringReader;
import java.sql.Clob;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.Set;

import oracle.sql.CLOB;

import org.exolab.castor.mapping.Mapping;
import org.exolab.castor.xml.Unmarshaller;
import org.xml.sax.InputSource;

import au.com.bytecode.opencsv.CSVWriter;
import edu.wustl.bulkoperator.BulkOperator;
import edu.wustl.bulkoperator.appservice.AppServiceInformationObject;
import edu.wustl.bulkoperator.client.BulkOperatorJob;
import edu.wustl.bulkoperator.csv.CsvReader;
import edu.wustl.bulkoperator.csv.impl.CsvFileReader;
import edu.wustl.bulkoperator.jobmanager.DefaultJobStatusListner;
import edu.wustl.bulkoperator.jobmanager.JobDetails;
import edu.wustl.bulkoperator.jobmanager.JobManager;
import edu.wustl.bulkoperator.jobmanager.JobStatusListener;
import edu.wustl.bulkoperator.metadata.BulkOperationClass;
import edu.wustl.bulkoperator.metadata.BulkOperationMetaData;
import edu.wustl.bulkoperator.util.AppUtility;
import edu.wustl.bulkoperator.util.BulkOperationException;
import edu.wustl.bulkoperator.util.BulkOperationUtility;
import edu.wustl.bulkoperator.util.DataList;
import edu.wustl.bulkoperator.util.DataReader;
import edu.wustl.bulkoperator.validator.TemplateValidator;
import edu.wustl.common.beans.NameValueBean;
import edu.wustl.common.beans.SessionDataBean;
import edu.wustl.common.bizlogic.DefaultBizLogic;
import edu.wustl.common.exception.ApplicationException;
import edu.wustl.common.exception.ErrorKey;
import edu.wustl.common.util.global.CommonServiceLocator;
import edu.wustl.common.util.logger.Logger;
import edu.wustl.dao.JDBCDAO;

/**
 * Bulk operation business logic from UI.
 * @author sagar_baldwa
 */
public class BulkOperationBizLogic extends DefaultBizLogic
{
	/**
	 * Logger added for Specimen class.
	 */
	private static final Logger logger = Logger.getCommonLogger(BulkOperationBizLogic.class);

	/**
	 * Escapes a value for embedding in a single-quoted SQL string literal.
	 * FIX: the original concatenated raw UI input into SQL (injection risk);
	 * the project DAO API exposes no prepared-statement variant here, so
	 * single quotes are doubled per the SQL standard instead.
	 * @param value raw value, may be null.
	 * @return escaped value safe inside '...' (null becomes empty string).
	 */
	private static String escapeSqlLiteral(String value)
	{
		return value == null ? "" : value.replace("'", "''");
	}

	/**
	 * Drains a CLOB character stream into a String, closing the reader.
	 * FIX: this loop was duplicated four times and leaked its Reader.
	 * @param reader character stream of the CLOB, closed by this method.
	 * @return full CLOB content.
	 * @throws IOException on read failure.
	 */
	private static String readClobContent(Reader reader) throws IOException
	{
		CharArrayWriter writer = new CharArrayWriter();
		try
		{
			int intVar = -1;
			while ((intVar = reader.read()) != -1)
			{
				writer.write(intVar);
			}
		}
		finally
		{
			reader.close();
		}
		return writer.toString();
	}

	/**
	 * Get Template Name from DropDown List.
	 * @return List of NameValueBean.
	 * @throws BulkOperationException BulkOperationException.
	 * @throws ApplicationException ApplicationException.
	 */
	public List<NameValueBean> getTemplateNameDropDownList()
			throws BulkOperationException, ApplicationException
	{
		List<NameValueBean> bulkOperationList = new ArrayList<NameValueBean>();
		JDBCDAO jdbcDao = null;
		try
		{
			jdbcDao = AppUtility.openJDBCSession();
			String query = "select DROPDOWN_NAME from catissue_bulk_operation";
			List list = jdbcDao.executeQuery(query);
			if (!list.isEmpty())
			{
				Iterator iterator = list.iterator();
				while (iterator.hasNext())
				{
					// Each row comes back as a single-column List.
					List innerList = (List) iterator.next();
					String innerString = (String) innerList.get(0);
					bulkOperationList.add(new NameValueBean(innerString, innerString));
				}
			}
		}
		catch (Exception exp)
		{
			logger.error(exp.getMessage(), exp);
			ErrorKey errorKey = ErrorKey.getErrorKey("bulk.error.dropdown");
			throw new BulkOperationException(errorKey, exp, "");
		}
		finally
		{
			AppUtility.closeJDBCSession(jdbcDao);
		}
		return bulkOperationList;
	}

	/**
	 * Get CSV File for the template selected in the drop-down. The template
	 * column may come back as an Oracle CLOB, a JDBC Clob, or a plain String
	 * depending on the database/driver; all three are handled.
	 * @param dropdownName String.
	 * @return File, or null when no row matches.
	 * @throws BulkOperationException BulkOperationException.
	 * @throws ApplicationException ApplicationException.
	 */
	public File getCSVFile(String dropdownName) throws BulkOperationException,
			ApplicationException
	{
		File csvFile = null;
		JDBCDAO jdbcDao = null;
		try
		{
			jdbcDao = AppUtility.openJDBCSession();
			// FIX: escape the user-supplied drop-down name (SQL injection).
			String query = "select csv_template from catissue_bulk_operation where "
					+ "DROPDOWN_NAME like '" + escapeSqlLiteral(dropdownName) + "'";
			List list = AppUtility.executeSQLQuery(query);
			if (!list.isEmpty())
			{
				List innerList = (List) list.get(0);
				if (!innerList.isEmpty())
				{
					Object value = innerList.get(0);
					String commaSeparatedString;
					if (value instanceof CLOB)
					{
						commaSeparatedString = readClobContent(((CLOB) value).getCharacterStream());
					}
					else if (value instanceof Clob)
					{
						commaSeparatedString = readClobContent(((Clob) value).getCharacterStream());
					}
					else
					{
						commaSeparatedString = value.toString();
					}
					csvFile = writeCSVFile(commaSeparatedString, dropdownName);
				}
			}
		}
		catch (Exception exp)
		{
			logger.error(exp.getMessage(), exp);
			ErrorKey errorkey = ErrorKey.getErrorKey("bulk.operation.issues");
			throw new BulkOperationException(errorkey, exp, exp.getMessage());
		}
		finally
		{
			AppUtility.closeJDBCSession(jdbcDao);
		}
		return csvFile;
	}

	/**
	 * Write CSV File containing the template's header row.
	 * @param commaSeparatedString String.
	 * @param dropdownName String.
	 * @return File.
	 * @throws Exception Exception.
	 */
	private File writeCSVFile(String commaSeparatedString, String dropdownName) throws Exception
	{
		CSVWriter writer = null;
		File csvFile = null;
		try
		{
			String csvFileName = dropdownName + ".csv";
			csvFile = new File(csvFileName);
			csvFile.createNewFile();
			writer = new CSVWriter(new FileWriter(csvFileName), ',');
			String[] stringArray = commaSeparatedString.split(",");
			writer.writeNext(stringArray);
		}
		catch (IOException exp)
		{
			logger.error(exp.getMessage(), exp);
			ErrorKey errorkey = ErrorKey.getErrorKey("bulk.error.csv.file.writing");
			throw new BulkOperationException(errorkey, exp, "");
		}
		finally
		{
			// FIX: the original called writer.close() unconditionally and
			// threw NullPointerException when the CSVWriter constructor failed.
			if (writer != null)
			{
				writer.close();
			}
		}
		return csvFile;
	}

	/**
	 * Get Operation Name And XML. Looks the row up by drop-down name when one
	 * is supplied, otherwise by operation name.
	 * @param dropdownName String.
	 * @param operationName String.
	 * @return List of String: [operation name, XML template].
	 * @throws BulkOperationException BulkOperationException.
	 */
	public List<String> getOperationNameAndXml(String dropdownName, String operationName)
			throws BulkOperationException
	{
		List<String> returnList = new ArrayList<String>();
		try
		{
			String query = null;
			// FIX: escape both lookup values (SQL injection).
			if (dropdownName != null && !"".equals(dropdownName))
			{
				query = "select operation, xml_tempalte from " + "catissue_bulk_operation "
						+ "where DROPDOWN_NAME = '" + escapeSqlLiteral(dropdownName) + "'";
			}
			else
			{
				query = "select operation, xml_tempalte from " + "catissue_bulk_operation "
						+ "where OPERATION = '" + escapeSqlLiteral(operationName) + "'";
			}
			List list = AppUtility.executeSQLQuery(query);
			if (!list.isEmpty())
			{
				List innerList = (List) list.get(0);
				if (!innerList.isEmpty())
				{
					String innerString1 = (String) innerList.get(0);
					returnList.add(innerString1);
					Object xmlValue = innerList.get(1);
					if (xmlValue instanceof CLOB)
					{
						returnList.add(readClobContent(((CLOB) xmlValue).getCharacterStream()));
					}
					else if (xmlValue instanceof Clob)
					{
						returnList.add(readClobContent(((Clob) xmlValue).getCharacterStream()));
					}
					else
					{
						returnList.add((String) xmlValue);
					}
				}
			}
		}
		catch (Exception exp)
		{
			logger.error(exp.getMessage(), exp);
			ErrorKey errorkey = ErrorKey.getErrorKey("bulk.operation.database.issues");
			throw new BulkOperationException(errorkey, exp, exp.getMessage());
		}
		return returnList;
	}

	/**
	 * Convert String To XML via Castor unmarshalling with mapping.xml.
	 * @param xmlString String.
	 * @return BulkOperationMetaData.
	 * @throws BulkOperationException BulkOperationException.
	 */
	public BulkOperationMetaData convertStringToXml(String xmlString)
			throws BulkOperationException
	{
		BulkOperationMetaData bulkOperationMetaData = null;
		try
		{
			InputSource inputSource = new InputSource(new StringReader(xmlString));
			String mappingFilePath = CommonServiceLocator.getInstance().getPropDirPath()
					+ File.separator + "mapping.xml";
			Mapping mapping = new Mapping();
			mapping.loadMapping(mappingFilePath);
			Unmarshaller unMarshaller = new Unmarshaller(BulkOperationMetaData.class);
			unMarshaller.setMapping(mapping);
			bulkOperationMetaData = (BulkOperationMetaData) unMarshaller.unmarshal(inputSource);
		}
		catch (Exception exp)
		{
			logger.error(exp.getMessage(), exp);
			ErrorKey errorkey = ErrorKey.getErrorKey("bulk.operation.issues");
			throw new BulkOperationException(errorkey, exp, exp.getMessage());
		}
		return bulkOperationMetaData;
	}

	/**
	 * Get job details by id.
	 * @param jobId String.
	 * @return JobDetails JobDetails.
	 * @throws BulkOperationException BulkOperationException.
	 * @throws ApplicationException ApplicationException.
	 */
	public JobDetails getJobDetails(String jobId) throws BulkOperationException,
			ApplicationException
	{
		return (JobDetails) retrieve(JobDetails.class.getName(), Long.valueOf(jobId));
	}

	/**
	 * Initialize BulkOperation: validates the CSV against the XML template,
	 * resolves defaults (template name, batch size) from the metadata, and
	 * starts the job.
	 * @param csvFileInputStream InputStream consumed by the job itself.
	 * @param csvFileInputStreamForValidation InputStream consumed for validation only.
	 * @param xmlTemplateInputSource InputSource.
	 * @param retrievedOperationName String.
	 * @param sessionDataBean SessionDataBean.
	 * @return Long job id.
	 * @throws BulkOperationException BulkOperationException.
	 */
	public Long initBulkOperation(InputStream csvFileInputStream,
			InputStream csvFileInputStreamForValidation, InputSource xmlTemplateInputSource,
			String retrievedOperationName, SessionDataBean sessionDataBean)
			throws BulkOperationException
	{
		Long jobId = null;
		CsvReader csvReader = CsvFileReader.createCsvFileReader(csvFileInputStreamForValidation,
				true);
		try
		{
			if (csvReader == null)
			{
				ErrorKey errorKey = ErrorKey.getErrorKey("bulk.error.reading.csv.file");
				throw new BulkOperationException(errorKey, null, "");
			}
			else
			{
				BulkOperator bulkOperator = parseXMLStringAndGetBulkOperatorInstance(
						retrievedOperationName, xmlTemplateInputSource);
				validateBulkOperation(retrievedOperationName, csvReader, bulkOperator);
				BulkOperationClass bulkOperationClass = bulkOperator.getMetadata()
						.getBulkOperationClass().iterator().next();
				// Fall back to metadata-level defaults when the class leaves them unset.
				if (bulkOperationClass.getTemplateName() == null)
				{
					bulkOperationClass.setTemplateName(bulkOperator.getMetadata().getTemplateName());
				}
				if (bulkOperationClass.getBatchSize() == null
						|| bulkOperationClass.getBatchSize() == 0)
				{
					bulkOperationClass.setBatchSize(bulkOperator.getMetadata().getBatchSize());
				}
				jobId = startBulkOperation(retrievedOperationName, csvFileInputStream,
						sessionDataBean, bulkOperationClass);
			}
		}
		catch (BulkOperationException bulkException)
		{
			ErrorKey errorKey = ErrorKey.getErrorKey("bulk.operation.issues");
			throw new BulkOperationException(errorKey, bulkException, "");
		}
		finally
		{
			if (csvReader != null)
			{
				csvReader.close();
			}
		}
		return jobId;
	}

	/**
	 * Parse CSV Data File.
	 * @param csvFileInputStream InputStream.
	 * @return DataList.
	 * @throws BulkOperationException BulkOperationException.
	 */
	private DataList parseCSVDataFile(InputStream csvFileInputStream)
			throws BulkOperationException
	{
		DataList dataList = null;
		try
		{
			Properties properties = new Properties();
			properties.put("inputStream", csvFileInputStream);
			dataList = DataReader.getNewDataReaderInstance(properties).readData();
		}
		catch (BulkOperationException bulkExp)
		{
			ErrorKey errorKey = ErrorKey.getErrorKey("bulk.error.reading.csv.file");
			throw new BulkOperationException(errorKey, bulkExp, "");
		}
		return dataList;
	}

	/**
	 * Parse XML String And Get BulkOperator Instance.
	 * @param operationName String.
	 * @param templateInputSource InputSource.
	 * @return BulkOperator.
	 * @throws BulkOperationException BulkOperationException.
	 */
	private BulkOperator parseXMLStringAndGetBulkOperatorInstance(String operationName,
			InputSource templateInputSource) throws BulkOperationException
	{
		BulkOperator bulkOperator = null;
		try
		{
			String mappingFilePath = CommonServiceLocator.getInstance().getPropDirPath()
					+ File.separator + "bulkOperatorXMLTemplateRules.xml";
			logger.info(mappingFilePath);
			logger.info("templateInputSource : " + templateInputSource);
			bulkOperator = new BulkOperator(templateInputSource, mappingFilePath);
		}
		catch (BulkOperationException bulkExp)
		{
			ErrorKey errorKey = ErrorKey.getErrorKey("bulk.operation.issues");
			throw new BulkOperationException(errorKey, bulkExp, bulkExp.getMessage());
		}
		return bulkOperator;
	}

	/**
	 * Validate BulkOperation: checks metadata exists and the CSV matches the
	 * XML template, aggregating validator errors into one exception message.
	 * @param operationName String.
	 * @param csvReader CsvReader.
	 * @param bulkOperator BulkOperator.
	 * @throws BulkOperationException BulkOperationException.
	 */
	private void validateBulkOperation(String operationName, CsvReader csvReader,
			BulkOperator bulkOperator) throws BulkOperationException
	{
		BulkOperationMetaData metaData = bulkOperator.getMetadata();
		if (metaData != null && metaData.getBulkOperationClass().isEmpty())
		{
			ErrorKey errorKey = ErrorKey.getErrorKey("bulk.error.bulk.metadata.xml.file");
			throw new BulkOperationException(errorKey, null, "");
		}
		BulkOperationClass bulkOperationClass = metaData.getBulkOperationClass().iterator().next();
		TemplateValidator templateValidator = new TemplateValidator();
		Set<String> errorList = templateValidator.validateXmlAndCsv(bulkOperationClass,
				operationName, csvReader);
		if (!errorList.isEmpty())
		{
			StringBuffer strBuffer = new StringBuffer();
			Iterator<String> errorIterator = errorList.iterator();
			while (errorIterator.hasNext())
			{
				strBuffer.append(errorIterator.next());
			}
			ErrorKey errorkey = ErrorKey.getErrorKey("bulk.operation.issues");
			throw new BulkOperationException(errorkey, null, strBuffer.toString());
		}
	}

	/**
	 * Start BulkOperation: registers the job with the JobManager and blocks
	 * until the job's data record is created, then returns its id.
	 * @param operationName String.
	 * @param csvFileInputStream InputStream.
	 * @param sessionDataBean SessionDataBean.
	 * @param bulkOperationClass BulkOperationClass.
	 * @return Long job id.
	 * @throws BulkOperationException BulkOperationException.
	 */
	private Long startBulkOperation(String operationName, InputStream csvFileInputStream,
			SessionDataBean sessionDataBean, BulkOperationClass bulkOperationClass)
			throws BulkOperationException
	{
		JobStatusListener jobStatusListener = new DefaultJobStatusListner();
		String bulkOperationClassName = BulkOperationUtility
				.getClassNameFromBulkOperationPropertiesFile();
		AppServiceInformationObject serviceInformationObject = new AppServiceInformationObject();
		serviceInformationObject.setUserName(sessionDataBean.getUserName());
		serviceInformationObject.setServiceImplementorClassName(bulkOperationClassName);
		BulkOperatorJob bulkOperatorJob = new BulkOperatorJob(operationName, sessionDataBean,
				csvFileInputStream, jobStatusListener, serviceInformationObject,
				bulkOperationClass);
		JobManager.getInstance().addJob(bulkOperatorJob);
		while (bulkOperatorJob.getJobData() == null)
		{
			logger.debug("Job not started yet !!!");
			// FIX: the original busy-spun at 100% CPU; back off briefly while
			// still waiting for the job data to appear.
			try
			{
				Thread.sleep(100);
			}
			catch (InterruptedException interruptedExp)
			{
				// Restore the interrupt flag and keep waiting, preserving the
				// original "block until job data exists" contract.
				Thread.currentThread().interrupt();
			}
		}
		return bulkOperatorJob.getJobData().getJobID();
	}
}
/*
 * Copyright (c) 2007 Mockito contributors
 * This program is made available under the terms of the MIT License.
 */
package org.mockitousage.stubbing;

import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.when;

import org.junit.Test;
import org.mockito.Mock;
import org.mockito.exceptions.base.MockitoException;
import org.mockitousage.IMethods;
import org.mockitoutil.TestBase;

/**
 * Regression tests for Mockito's consecutive-answer stubbing: chained
 * thenReturn/thenThrow calls, the varargs shorthand forms, the equivalent
 * doThrow/doNothing chains for void methods, and validation of invalid
 * checked exceptions. Throughout, the last configured answer is expected to
 * repeat for all subsequent calls.
 */
public class StubbingConsecutiveAnswersTest extends TestBase {
    @Mock private IMethods mock;

    @Test
    public void should_return_consecutive_values() {
        when(mock.simpleMethod()).thenReturn("one").thenReturn("two").thenReturn("three");

        assertEquals("one", mock.simpleMethod());
        assertEquals("two", mock.simpleMethod());
        assertEquals("three", mock.simpleMethod());
        // The last stubbed value repeats on further calls.
        assertEquals("three", mock.simpleMethod());
        assertEquals("three", mock.simpleMethod());
    }

    @Test
    public void should_return_consecutive_values_for_two_nulls() {
        // (String[]) null — the whole varargs array is null, not one null element.
        when(mock.simpleMethod()).thenReturn(null, (String[]) null);

        assertNull(mock.simpleMethod());
        assertNull(mock.simpleMethod());
    }

    @Test
    public void should_return_consecutive_values_first_var_arg_null() {
        // (String) null — a single null element in the varargs.
        when(mock.simpleMethod()).thenReturn("one", (String) null);

        assertEquals("one", mock.simpleMethod());
        assertNull(mock.simpleMethod());
        assertNull(mock.simpleMethod());
    }

    @Test
    public void should_return_consecutive_values_var_arg_null() {
        // Null varargs array after a first value behaves like a single null answer.
        when(mock.simpleMethod()).thenReturn("one", (String[]) null);

        assertEquals("one", mock.simpleMethod());
        assertNull(mock.simpleMethod());
        assertNull(mock.simpleMethod());
    }

    @Test
    public void should_return_consecutive_values_var_args_contain_null() {
        when(mock.simpleMethod()).thenReturn("one", "two", null);

        assertEquals("one", mock.simpleMethod());
        assertEquals("two", mock.simpleMethod());
        assertNull(mock.simpleMethod());
        assertNull(mock.simpleMethod());
    }

    @Test
    public void should_return_consecutive_values_set_by_shorten_then_return_method() {
        // Varargs shorthand must behave like three chained thenReturn calls.
        when(mock.simpleMethod()).thenReturn("one", "two", "three");

        assertEquals("one", mock.simpleMethod());
        assertEquals("two", mock.simpleMethod());
        assertEquals("three", mock.simpleMethod());
        assertEquals("three", mock.simpleMethod());
        assertEquals("three", mock.simpleMethod());
    }

    @Test
    public void should_return_consecutive_value_and_throw_exceptions_set_by_shorten_return_methods() {
        // Mixed chain: returns and throws interleave in configuration order.
        when(mock.simpleMethod())
                .thenReturn("zero")
                .thenReturn("one", "two")
                .thenThrow(new NullPointerException(), new RuntimeException())
                .thenReturn("three")
                .thenThrow(new IllegalArgumentException());

        assertEquals("zero", mock.simpleMethod());
        assertEquals("one", mock.simpleMethod());
        assertEquals("two", mock.simpleMethod());

        try {
            mock.simpleMethod();
            fail();
        } catch (NullPointerException expected) {
        }

        try {
            mock.simpleMethod();
            fail();
        } catch (RuntimeException expected) {
        }

        assertEquals("three", mock.simpleMethod());

        try {
            mock.simpleMethod();
            fail();
        } catch (IllegalArgumentException expected) {
        }
    }

    @Test
    public void should_throw_consecutively() {
        when(mock.simpleMethod())
                .thenThrow(new RuntimeException())
                .thenThrow(new IllegalArgumentException())
                .thenThrow(new NullPointerException());

        try {
            mock.simpleMethod();
            fail();
        } catch (RuntimeException expected) {
        }

        try {
            mock.simpleMethod();
            fail();
        } catch (IllegalArgumentException expected) {
        }

        try {
            mock.simpleMethod();
            fail();
        } catch (NullPointerException expected) {
        }

        // Last configured exception keeps being thrown.
        try {
            mock.simpleMethod();
            fail();
        } catch (NullPointerException expected) {
        }
    }

    @Test
    public void should_throw_consecutively_set_by_shorten_then_throw_method() {
        when(mock.simpleMethod())
                .thenThrow(
                        new RuntimeException(),
                        new IllegalArgumentException(),
                        new NullPointerException());

        try {
            mock.simpleMethod();
            fail();
        } catch (RuntimeException expected) {
        }

        try {
            mock.simpleMethod();
            fail();
        } catch (IllegalArgumentException expected) {
        }

        try {
            mock.simpleMethod();
            fail();
        } catch (NullPointerException expected) {
        }

        try {
            mock.simpleMethod();
            fail();
        } catch (NullPointerException expected) {
        }
    }

    @Test
    public void should_throw_classes() {
        // Unavoidable JDK7+ 'unchecked generic array creation' warning
        when(mock.simpleMethod()).thenThrow(IllegalArgumentException.class);

        try {
            mock.simpleMethod();
            fail();
        } catch (IllegalArgumentException expected) {
        }

        try {
            mock.simpleMethod();
            fail();
        } catch (IllegalArgumentException expected) {
        }
    }

    @Test
    @SuppressWarnings("unchecked")
    public void should_throw_consecutively_classes_set_by_shorten_then_throw_method() {
        // Unavoidable JDK7+ 'unchecked generic array creation' warning
        when(mock.simpleMethod())
                .thenThrow(
                        RuntimeException.class,
                        IllegalArgumentException.class,
                        NullPointerException.class);

        try {
            mock.simpleMethod();
            fail();
        } catch (RuntimeException expected) {
        }

        try {
            mock.simpleMethod();
            fail();
        } catch (IllegalArgumentException expected) {
        }

        try {
            mock.simpleMethod();
            fail();
        } catch (NullPointerException expected) {
        }

        try {
            mock.simpleMethod();
            fail();
        } catch (NullPointerException expected) {
        }
    }

    @Test
    public void should_mix_consecutive_returns_with_exceptions() {
        when(mock.simpleMethod())
                .thenThrow(new IllegalArgumentException())
                .thenReturn("one")
                .thenThrow(new NullPointerException())
                .thenReturn(null);

        try {
            mock.simpleMethod();
            fail();
        } catch (IllegalArgumentException expected) {
        }

        assertEquals("one", mock.simpleMethod());

        try {
            mock.simpleMethod();
            fail();
        } catch (NullPointerException expected) {
        }

        assertEquals(null, mock.simpleMethod());
        assertEquals(null, mock.simpleMethod());
    }

    @Test
    public void should_validate_consecutive_exception() {
        // A checked exception not declared by the stubbed method must be rejected.
        assertThatThrownBy(
                        () -> {
                            when(mock.simpleMethod()).thenReturn("one").thenThrow(new Exception());
                        })
                .isInstanceOf(MockitoException.class)
                .hasMessageContainingAll(
                        "Checked exception is invalid for this method!",
                        "Invalid: java.lang.Exception");
    }

    @Test
    public void should_stub_void_method_and_continue_throwing() {
        // doThrow/doNothing chain is the void-method analogue of thenThrow/thenReturn.
        doThrow(new IllegalArgumentException())
                .doNothing()
                .doThrow(new NullPointerException())
                .when(mock)
                .voidMethod();

        try {
            mock.voidMethod();
            fail();
        } catch (IllegalArgumentException expected) {
        }

        mock.voidMethod();

        try {
            mock.voidMethod();
            fail();
        } catch (NullPointerException expected) {
        }

        try {
            mock.voidMethod();
            fail();
        } catch (NullPointerException expected) {
        }
    }

    @Test
    public void should_stub_void_method() {
        doNothing().doThrow(new NullPointerException()).doNothing().when(mock).voidMethod();

        mock.voidMethod();

        try {
            mock.voidMethod();
            fail();
        } catch (NullPointerException expected) {
        }

        // Final doNothing repeats for all further calls.
        mock.voidMethod();
        mock.voidMethod();
    }

    @Test
    public void should_validate_consecutive_exception_for_void_method() {
        assertThatThrownBy(
                        () -> {
                            doNothing().doThrow(new Exception()).when(mock).voidMethod();
                        })
                .isInstanceOf(MockitoException.class)
                .hasMessageContainingAll(
                        "Checked exception is invalid for this method!",
                        "Invalid: java.lang.Exception");
    }
}